diff --git a/models/ResNet50/class_names.json b/models/ResNet50/class_names.json
deleted file mode 100644
index 57874547cdef98b4f08dd1eebb09a2dcedc3ed4e..0000000000000000000000000000000000000000
--- a/models/ResNet50/class_names.json
+++ /dev/null
@@ -1 +0,0 @@
-["Abo", "Abra", "Akwakwak", "Alakazam", "Amonistar", "Amonita", "Aquali", "Arbok", "Arcanin", "Artikodin", "Aspicot", "A\u00e9romite", "Boustiflor", "Bulbizarre", "Caninos", "Carabaffe", "Carapuce", "Chenipan", "Chrysacier", "Ch\u00e9tiflor", "Coconfort", "Colossinge", "Crustabri", "Dardargnan", "Dodrio", "Doduo", "Dracaufeu", "Draco", "Dracolosse", "Ectoplasma", "Empiflor", "Excelangue", "Fantominus", "Farfetchd", "Feunard", "Flagadoss", "Florizarre", "F\u00e9rosinge", "Galopa", "Goupix", "Gravalanch", "Grodoudou", "Grolem", "Grotadmorv", "Herbizarre", "Hypnomade", "Hypoc\u00e9an", "Hypotrempe", "Ins\u00e9cateur", "Kabuto", "Kabutops", "Kadabra", "Kangourex", "Kicklee", "Kokiyas", "Krabboss", "Krabby", "Lamantine", "Leveinard", "Lippoutou", "Lokhlass", "L\u00e9viator", "M. Mime", "Machoc", "Machopeur", "Mackogneur", "Magicarpe", "Magmar", "Magn\u00e9ti", "Magn\u00e9ton", "Mew", "Mewtwo", "Miaouss", "Mimitoss", "Minidraco", "Mystherbe", "M\u00e9lodelfe", "M\u00e9lof\u00e9e", "M\u00e9tamorph", "Nidoking", "Nidoqueen", "Nidoran_femelle", "Nidoran_male", "Nidorina", "Nidorino", "Noadkoko", "Noeunoeuf", "Nosferalto", "Nosferapti", "Onix", "Ortide", "Ossatueur", "Osselait", "Otaria", "Papilusion", "Paras", "Parasect", "Persian", "Piafabec", "Pikachu", "Poissir\u00e8ne", "Poissoroy", "Ponyta", "Porygon", "Psykokwak", "Ptitard", "Pt\u00e9ra", "Pyroli", "Racaillou", "Rafflesia", "Raichu", "Ramoloss", "Rapasdepic", "Rattata", "Rattatac", "Reptincel", "Rhinocorne", "Rhinof\u00e9ros", "Rondoudou", "Ronflex", "Roucarnage", "Roucool", "Roucoups", "Sabelette", "Sablaireau", "Salam\u00e8che", "Saquedeneu", "Scarabrute", "Smogo", "Smogogo", "Soporifik", "Spectrum", "Stari", "Staross", "Sulfura", "Tadmorv", "Tartard", "Taupiqueur", "Tauros", "Tentacool", "Tentacruel", "Tortank", "Triopikeur", "Tygnon", "T\u00eatarte", "Voltali", "Voltorbe", "\u00c9lecthor", "\u00c9lectrode", "\u00c9lektek", "\u00c9voli"]
\ No newline at end of file
diff --git a/models/ResNet50/pokedex_ResNet50.h5 b/models/ResNet50/pokedex_ResNet50.h5
deleted file mode 100644
index a126216f80da6a9506e385e5c7b060c7471179c7..0000000000000000000000000000000000000000
Binary files a/models/ResNet50/pokedex_ResNet50.h5 and /dev/null differ
diff --git a/models/ResNet50/saved_model/fingerprint.pb b/models/ResNet50/saved_model/fingerprint.pb
deleted file mode 100644
index 2378f7e80c93a3c4204936a29dfbf4cd872d4a8c..0000000000000000000000000000000000000000
Binary files a/models/ResNet50/saved_model/fingerprint.pb and /dev/null differ
diff --git a/models/ResNet50/saved_model/saved_model.pb b/models/ResNet50/saved_model/saved_model.pb
deleted file mode 100644
index 5bacac02853fc4439551aaa220bb5392d186b2c5..0000000000000000000000000000000000000000
Binary files a/models/ResNet50/saved_model/saved_model.pb and /dev/null differ
diff --git a/models/ResNet50/saved_model/variables/variables.data-00000-of-00001 b/models/ResNet50/saved_model/variables/variables.data-00000-of-00001
deleted file mode 100644
index 62847711cfdef2a5814449403869126581437a30..0000000000000000000000000000000000000000
Binary files a/models/ResNet50/saved_model/variables/variables.data-00000-of-00001 and /dev/null differ
diff --git a/models/ResNet50/saved_model/variables/variables.index b/models/ResNet50/saved_model/variables/variables.index
deleted file mode 100644
index bd1ff13d8c2f3c0a8473c14290ecd4d00c82f28a..0000000000000000000000000000000000000000
Binary files a/models/ResNet50/saved_model/variables/variables.index and /dev/null differ
diff --git a/models/Xception/pokedex_Xception.h5 b/models/Xception/pokedex_Xception.h5
deleted file mode 100644
index a2efede14ecec2aa20f1b5624c9efdf946529e71..0000000000000000000000000000000000000000
Binary files a/models/Xception/pokedex_Xception.h5 and /dev/null differ
diff --git a/models/Xception/saved_model/fingerprint.pb b/models/Xception/saved_model/fingerprint.pb
deleted file mode 100644
index 26ad768edd58b0a52130abab08c00ee2282c3862..0000000000000000000000000000000000000000
Binary files a/models/Xception/saved_model/fingerprint.pb and /dev/null differ
diff --git a/models/Xception/saved_model/saved_model.pb b/models/Xception/saved_model/saved_model.pb
deleted file mode 100644
index 26317ea4520cbcfb121998839db71b0e8ee25bf4..0000000000000000000000000000000000000000
Binary files a/models/Xception/saved_model/saved_model.pb and /dev/null differ
diff --git a/models/Xception/saved_model/variables/variables.data-00000-of-00001 b/models/Xception/saved_model/variables/variables.data-00000-of-00001
deleted file mode 100644
index bc96ae0060135c6077285b3134830352cf691a43..0000000000000000000000000000000000000000
Binary files a/models/Xception/saved_model/variables/variables.data-00000-of-00001 and /dev/null differ
diff --git a/models/Xception/saved_model/variables/variables.index b/models/Xception/saved_model/variables/variables.index
deleted file mode 100644
index a8dbb599d87680d6232a2c222a71b354595e6d12..0000000000000000000000000000000000000000
Binary files a/models/Xception/saved_model/variables/variables.index and /dev/null differ
diff --git a/slurm/ResNet50_16519109.out b/slurm/ResNet50_16519109.out
new file mode 100644
index 0000000000000000000000000000000000000000..7360036e5bc8c739bc9ae934715bae5ed4568304
--- /dev/null
+++ b/slurm/ResNet50_16519109.out
@@ -0,0 +1,14 @@
+2025-04-09 18:20:13.977433: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: SSE4.1 SSE4.2 AVX AVX2 FMA
+To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
+2025-04-09 18:20:19.184935: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:20:19.224471: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:20:19.224774: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:20:19.225595: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: SSE4.1 SSE4.2 AVX AVX2 FMA
+To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
+2025-04-09 18:20:19.225943: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:20:19.226273: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:20:19.226482: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:20:19.441911: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:20:19.442190: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:20:19.442518: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:20:19.442797: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1613] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 79383 MB memory: -> device: 0, name: NVIDIA A100 80GB PCIe, pci bus id: 0000:81:00.0, compute capability: 8.0
diff --git a/slurm/Xception_16519103.out b/slurm/Xception_16519103.out
new file mode 100644
index 0000000000000000000000000000000000000000..4e095aeacfa1613b00b36a89c116f30c82b789b6
--- /dev/null
+++ b/slurm/Xception_16519103.out
@@ -0,0 +1,352 @@
+2025-04-09 18:14:12.010126: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: SSE4.1 SSE4.2 AVX AVX2 FMA
+To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
+2025-04-09 18:14:16.572607: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:14:16.611967: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:14:16.612273: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:14:16.613091: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: SSE4.1 SSE4.2 AVX AVX2 FMA
+To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
+2025-04-09 18:14:16.613286: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:14:16.613513: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:14:16.613720: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:14:16.831847: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:14:16.832119: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:14:16.832452: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
+2025-04-09 18:14:16.832729: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1613] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 79383 MB memory: -> device: 0, name: NVIDIA A100 80GB PCIe, pci bus id: 0000:81:00.0, compute capability: 8.0
+2025-04-09 18:15:31.217324: W tensorflow/core/lib/png/png_io.cc:88] PNG warning: iCCP: known incorrect sRGB profile
+2025-04-09 18:16:19.014768: W tensorflow/core/lib/png/png_io.cc:88] PNG warning: iCCP: known incorrect sRGB profile
+WARNING:tensorflow:From /opt/ebsofts/TensorFlow/2.11.0-foss-2022a-CUDA-11.7.0/lib/python3.10/site-packages/tensorflow/python/autograph/pyct/static_analysis/liveness.py:83: Analyzer.lamba_check (from tensorflow.python.autograph.pyct.static_analysis.liveness) is deprecated and will be removed after 2023-09-23.
+Instructions for updating:
+Lambda fuctions will be no more assumed to be used in the statement where they are used, or at least in the same block. https://github.com/tensorflow/tensorflow/issues/56089
+WARNING:tensorflow:Using a while_loop for converting RngReadAndSkip cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting Bitcast cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting Bitcast cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting StatelessRandomUniformV2 cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting ImageProjectiveTransformV3 cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting RngReadAndSkip cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting Bitcast cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting Bitcast cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting StatelessRandomUniformV2 cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting ImageProjectiveTransformV3 cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting RngReadAndSkip cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting Bitcast cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting Bitcast cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting StatelessRandomUniformFullIntV2 cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting StatelessRandomGetKeyCounter cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting StatelessRandomUniformV2 cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting RngReadAndSkip cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting Bitcast cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting Bitcast cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting StatelessRandomUniformV2 cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting ImageProjectiveTransformV3 cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting RngReadAndSkip cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting Bitcast cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting Bitcast cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting StatelessRandomUniformV2 cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting ImageProjectiveTransformV3 cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting RngReadAndSkip cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting Bitcast cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting Bitcast cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting StatelessRandomUniformFullIntV2 cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting StatelessRandomGetKeyCounter cause there is no registered converter for this op.
+WARNING:tensorflow:Using a while_loop for converting StatelessRandomUniformV2 cause there is no registered converter for this op.
+2025-04-09 18:16:36.185973: W tensorflow/core/grappler/optimizers/data/auto_shard.cc:784] AUTO sharding policy will apply DATA sharding policy as it failed to apply FILE sharding policy because of the following reason: Found an unshardable source dataset: name: "TensorSliceDataset/_1"
+op: "TensorSliceDataset"
+input: "Placeholder/_0"
+attr {
+  key: "Toutput_types"
+  value {
+    list {
+      type: DT_STRING
+    }
+  }
+}
+attr {
+  key: "_cardinality"
+  value {
+    i: 20409
+  }
+}
+attr {
+  key: "is_files"
+  value {
+    b: false
+  }
+}
+attr {
+  key: "metadata"
+  value {
+    s: "\n\024TensorSliceDataset:0"
+  }
+}
+attr {
+  key: "output_shapes"
+  value {
+    list {
+      shape {
+      }
+    }
+  }
+}
+attr {
+  key: "replicate_on_split"
+  value {
+    b: false
+  }
+}
+experimental_type {
+  type_id: TFT_PRODUCT
+  args {
+    type_id: TFT_DATASET
+    args {
+      type_id: TFT_PRODUCT
+      args {
+        type_id: TFT_TENSOR
+        args {
+          type_id: TFT_STRING
+        }
+      }
+    }
+  }
+}
+
+Number of GPUs: 1
+Found 25511 files belonging to 151 classes.
+Using 20409 files for training.
+Found 25511 files belonging to 151 classes.
+Using 5102 files for validation.
+Detected 151 Pokémon classes.
+Computing class weights...
+Class weights ready.
+Unique labels in training set: [  0   1   2   3   4   5   6   7   8   9  10  11  12  13  14  15  16  17
+  18  19  20  21  22  23  24  25  26  27  28  29  30  31  32  33  34  35
+  36  37  38  39  40  41  42  43  44  45  46  47  48  49  50  51  52  53
+  54  55  56  57  58  59  60  61  62  63  64  65  66  67  68  69  70  71
+  72  73  74  75  76  77  78  79  80  81  82  83  84  85  86  87  88  89
+  90  91  92  93  94  95  96  97  98  99 100 101 102 103 104 105 106 107
+ 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125
+ 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143
+ 144 145 146 147 148 149 150]
+Class Names (index -> name):
+0: Abo
+1: Abra
+2: Akwakwak
+3: Alakazam
+4: Amonistar
+5: Amonita
+6: Aquali
+7: Arbok
+8: Arcanin
+9: Artikodin
+10: Aspicot
+11: Aéromite
+12: Boustiflor
+13: Bulbizarre
+14: Caninos
+15: Carabaffe
+16: Carapuce
+17: Chenipan
+18: Chrysacier
+19: Chétiflor
+20: Coconfort
+21: Colossinge
+22: Crustabri
+23: Dardargnan
+24: Dodrio
+25: Doduo
+26: Dracaufeu
+27: Draco
+28: Dracolosse
+29: Ectoplasma
+30: Empiflor
+31: Excelangue
+32: Fantominus
+33: Farfetchd
+34: Feunard
+35: Flagadoss
+36: Florizarre
+37: Férosinge
+38: Galopa
+39: Goupix
+40: Gravalanch
+41: Grodoudou
+42: Grolem
+43: Grotadmorv
+44: Herbizarre
+45: Hypnomade
+46: Hypocéan
+47: Hypotrempe
+48: Insécateur
+49: Kabuto
+50: Kabutops
+51: Kadabra
+52: Kangourex
+53: Kicklee
+54: Kokiyas
+55: Krabboss
+56: Krabby
+57: Lamantine
+58: Leveinard
+59: Lippoutou
+60: Lokhlass
+61: Léviator
+62: M. Mime
+63: Machoc
+64: Machopeur
+65: Mackogneur
+66: Magicarpe
+67: Magmar
+68: Magnéti
+69: Magnéton
+70: Mew
+71: Mewtwo
+72: Miaouss
+73: Mimitoss
+74: Minidraco
+75: Mystherbe
+76: Mélodelfe
+77: Mélofée
+78: Métamorph
+79: Nidoking
+80: Nidoqueen
+81: Nidoran_femelle
+82: Nidoran_male
+83: Nidorina
+84: Nidorino
+85: Noadkoko
+86: Noeunoeuf
+87: Nosferalto
+88: Nosferapti
+89: Onix
+90: Ortide
+91: Ossatueur
+92: Osselait
+93: Otaria
+94: Papilusion
+95: Paras
+96: Parasect
+97: Persian
+98: Piafabec
+99: Pikachu
+100: Poissirène
+101: Poissoroy
+102: Ponyta
+103: Porygon
+104: Psykokwak
+105: Ptitard
+106: Ptéra
+107: Pyroli
+108: Racaillou
+109: Rafflesia
+110: Raichu
+111: Ramoloss
+112: Rapasdepic
+113: Rattata
+114: Rattatac
+115: Reptincel
+116: Rhinocorne
+117: Rhinoféros
+118: Rondoudou
+119: Ronflex
+120: Roucarnage
+121: Roucool
+122: Roucoups
+123: Sabelette
+124: Sablaireau
+125: Salamèche
+126: Saquedeneu
+127: Scarabrute
+128: Smogo
+129: Smogogo
+130: Soporifik
+131: Spectrum
+132: Stari
+133: Staross
+134: Sulfura
+135: Tadmorv
+136: Tartard
+137: Taupiqueur
+138: Tauros
+139: Tentacool
+140: Tentacruel
+141: Tortank
+142: Triopikeur
+143: Tygnon
+144: Têtarte
+145: Voltali
+146: Voltorbe
+147: Électhor
+148: Électrode
+149: Élektek
+150: Évoli
+Epoch 1/20
+2025-04-09 18:16:45.917455: I tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:428] Loaded cuDNN version 8401
+2025-04-09 18:16:50.308046: I tensorflow/compiler/xla/stream_executor/cuda/cuda_blas.cc:630] TensorFloat-32 will be used for the matrix multiplication. This will only be logged once.
+2025-04-09 18:16:50.336746: I tensorflow/compiler/xla/service/service.cc:173] XLA service 0x14de1ef42810 initialized for platform CUDA (this does not guarantee that XLA will be used). Devices:
+2025-04-09 18:16:50.336763: I tensorflow/compiler/xla/service/service.cc:181] StreamExecutor device (0): NVIDIA A100 80GB PCIe, Compute Capability 8.0
+2025-04-09 18:16:50.406696: I tensorflow/compiler/mlir/tensorflow/utils/dump_mlir_util.cc:268] disabling MLIR crash reproducer, set env var `MLIR_CRASH_REPRODUCER_DIRECTORY` to enable.
+2025-04-09 18:16:50.833080: I tensorflow/compiler/jit/xla_compilation_cache.cc:477] Compiled cluster using XLA! This line is logged at most once for the lifetime of the process.
[... Keras per-batch progress output for epoch 1/20 (638 batches per epoch): over the batches shown here (1-497), training loss declines from about 5.1 (peaking near 5.5 in the first few batches) to about 4.12, while accuracy climbs from 0.00 to about 0.09 ...]
- ETA: 37s - loss: 4.1187 - accuracy: 0.0901 498/638 [======================>.......] - ETA: 36s - loss: 4.1179 - accuracy: 0.0902 499/638 [======================>.......] - ETA: 36s - loss: 4.1160 - accuracy: 0.0904 500/638 [======================>.......] - ETA: 36s - loss: 4.1157 - accuracy: 0.0906 501/638 [======================>.......] - ETA: 35s - loss: 4.1144 - accuracy: 0.0908 502/638 [======================>.......] - ETA: 35s - loss: 4.1132 - accuracy: 0.0908 503/638 [======================>.......] - ETA: 35s - loss: 4.1116 - accuracy: 0.0908 504/638 [======================>.......] - ETA: 35s - loss: 4.1113 - accuracy: 0.0908 505/638 [======================>.......] - ETA: 34s - loss: 4.1103 - accuracy: 0.0909 506/638 [======================>.......] - ETA: 34s - loss: 4.1095 - accuracy: 0.0909 507/638 [======================>.......] - ETA: 34s - loss: 4.1073 - accuracy: 0.0912 508/638 [======================>.......] - ETA: 34s - loss: 4.1059 - accuracy: 0.0914 509/638 [======================>.......] - ETA: 33s - loss: 4.1051 - accuracy: 0.0915 510/638 [======================>.......] - ETA: 33s - loss: 4.1037 - accuracy: 0.0917 511/638 [=======================>......] - ETA: 33s - loss: 4.1021 - accuracy: 0.0919 512/638 [=======================>......] - ETA: 33s - loss: 4.1019 - accuracy: 0.0920 513/638 [=======================>......] - ETA: 32s - loss: 4.1004 - accuracy: 0.0922 514/638 [=======================>......] - ETA: 32s - loss: 4.0989 - accuracy: 0.0924 515/638 [=======================>......] - ETA: 32s - loss: 4.0979 - accuracy: 0.0924 516/638 [=======================>......] - ETA: 32s - loss: 4.0981 - accuracy: 0.0924 517/638 [=======================>......] - ETA: 31s - loss: 4.0982 - accuracy: 0.0928 518/638 [=======================>......] - ETA: 31s - loss: 4.0980 - accuracy: 0.0928 519/638 [=======================>......] - ETA: 31s - loss: 4.0964 - accuracy: 0.0931 520/638 [=======================>......] - ETA: 30s - loss: 4.0944 - accuracy: 0.0931 521/638 [=======================>......] - ETA: 30s - loss: 4.0925 - accuracy: 0.0932 522/638 [=======================>......] - ETA: 30s - loss: 4.0912 - accuracy: 0.0934 523/638 [=======================>......] - ETA: 30s - loss: 4.0902 - accuracy: 0.0934 524/638 [=======================>......] - ETA: 29s - loss: 4.0906 - accuracy: 0.0933 525/638 [=======================>......] - ETA: 29s - loss: 4.0907 - accuracy: 0.0933 526/638 [=======================>......] - ETA: 29s - loss: 4.0885 - accuracy: 0.0936 527/638 [=======================>......] - ETA: 29s - loss: 4.0878 - accuracy: 0.0937 528/638 [=======================>......] - ETA: 28s - loss: 4.0879 - accuracy: 0.0937 529/638 [=======================>......] - ETA: 28s - loss: 4.0858 - accuracy: 0.0940 530/638 [=======================>......] - ETA: 28s - loss: 4.0839 - accuracy: 0.0945 531/638 [=======================>......] - ETA: 28s - loss: 4.0830 - accuracy: 0.0944 532/638 [========================>.....] - ETA: 27s - loss: 4.0814 - accuracy: 0.0946 533/638 [========================>.....] - ETA: 27s - loss: 4.0802 - accuracy: 0.0946 534/638 [========================>.....] - ETA: 27s - loss: 4.0789 - accuracy: 0.0950 532025-04-09 18:19:17.520753: W tensorflow/core/lib/png/png_io.cc:88] PNG warning: iCCP: known incorrect sRGB profile +5/638 [========================>.....] - ETA: 27s - loss: 4.0773 - accuracy: 0.0954 536/638 [========================>.....] 
- ETA: 26s - loss: 4.0754 - accuracy: 0.0956 537/638 [========================>.....] - ETA: 26s - loss: 4.0759 - accuracy: 0.0955 538/638 [========================>.....] - ETA: 26s - loss: 4.0753 - accuracy: 0.0955 539/638 [========================>.....] - ETA: 25s - loss: 4.0733 - accuracy: 0.0958 540/638 [========================>.....] - ETA: 25s - loss: 4.0725 - accuracy: 0.0959 541/638 [========================>.....] - ETA: 25s - loss: 4.0718 - accuracy: 0.0961 542/638 [========================>.....] - ETA: 25s - loss: 4.0713 - accuracy: 0.0960 543/638 [========================>.....] - ETA: 24s - loss: 4.0695 - accuracy: 0.0963 544/638 [========================>.....] - ETA: 24s - loss: 4.0685 - accuracy: 0.0964 545/638 [========================>.....] - ETA: 24s - loss: 4.0690 - accuracy: 0.0963 546/638 [========================>.....] - ETA: 24s - loss: 4.0688 - accuracy: 0.0967 547/638 [========================>.....] - ETA: 23s - loss: 4.0681 - accuracy: 0.0968 548/638 [========================>.....] - ETA: 23s - loss: 4.0670 - accuracy: 0.0967 549/638 [========================>.....] - ETA: 23s - loss: 4.0659 - accuracy: 0.0967 550/638 [========================>.....] - ETA: 23s - loss: 4.0650 - accuracy: 0.0967 551/638 [========================>.....] - ETA: 22s - loss: 4.0628 - accuracy: 0.0969 552/638 [========================>.....] - ETA: 22s - loss: 4.0614 - accuracy: 0.0970 553/638 [=========================>....] - ETA: 22s - loss: 4.0609 - accuracy: 0.0970 554/638 [=========================>....] - ETA: 22s - loss: 4.0588 - accuracy: 0.0971 555/638 [=========================>....] - ETA: 21s - loss: 4.0575 - accuracy: 0.0972 556/638 [=========================>....] - ETA: 21s - loss: 4.0565 - accuracy: 0.0973 557/638 [=========================>....] - ETA: 21s - loss: 4.0558 - accuracy: 0.0976 558/638 [=========================>....] - ETA: 21s - loss: 4.0540 - accuracy: 0.0978 559/638 [=========================>....] - ETA: 20s - loss: 4.0531 - accuracy: 0.0980 560/638 [=========================>....] - ETA: 20s - loss: 4.0522 - accuracy: 0.0983 561/638 [=========================>....] - ETA: 20s - loss: 4.0512 - accuracy: 0.0983 562/638 [=========================>....] - ETA: 19s - loss: 4.0499 - accuracy: 0.0985 563/638 [=========================>....] - ETA: 19s - loss: 4.0511 - accuracy: 0.0985 564/638 [=========================>....] - ETA: 19s - loss: 4.0507 - accuracy: 0.0985 565/638 [=========================>....] - ETA: 19s - loss: 4.0491 - accuracy: 0.0985 566/638 [=========================>....] - ETA: 18s - loss: 4.0498 - accuracy: 0.0984 567/638 [=========================>....] - ETA: 18s - loss: 4.0492 - accuracy: 0.0985 568/638 [=========================>....] - ETA: 18s - loss: 4.0470 - accuracy: 0.0987 569/638 [=========================>....] - ETA: 18s - loss: 4.0454 - accuracy: 0.0989 570/638 [=========================>....] - ETA: 17s - loss: 4.0438 - accuracy: 0.0992 571/638 [=========================>....] - ETA: 17s - loss: 4.0432 - accuracy: 0.0992 572/638 [=========================>....] - ETA: 17s - loss: 4.0424 - accuracy: 0.0992 573/638 [=========================>....] - ETA: 17s - loss: 4.0410 - accuracy: 0.0995 574/638 [=========================>....] - ETA: 16s - loss: 4.0387 - accuracy: 0.0999 575/638 [==========================>...] - ETA: 16s - loss: 4.0384 - accuracy: 0.1001 576/638 [==========================>...] - ETA: 16s - loss: 4.0369 - accuracy: 0.1002 577/638 [==========================>...] 
- ETA: 16s - loss: 4.0356 - accuracy: 0.1004 578/638 [==========================>...] - ETA: 15s - loss: 4.0352 - accuracy: 0.1003 579/638 [==========================>...] - ETA: 15s - loss: 4.0336 - accuracy: 0.1005 580/638 [==========================>...] - ETA: 15s - loss: 4.0319 - accuracy: 0.1008 581/638 [==========================>...] - ETA: 15s - loss: 4.0313 - accuracy: 0.1006 582/638 [==========================>...] - ETA: 14s - loss: 4.0307 - accuracy: 0.1006 583/638 [==========================>...] - ETA: 14s - loss: 4.0295 - accuracy: 0.1008 584/638 [==========================>...] - ETA: 14s - loss: 4.0293 - accuracy: 0.1008 585/638 [==========================>...] - ETA: 13s - loss: 4.0290 - accuracy: 0.1008 586/638 [==========================>...] - ETA: 13s - loss: 4.0278 - accuracy: 0.1011 587/638 [==========================>...] - ETA: 13s - loss: 4.0265 - accuracy: 0.1011 588/638 [==========================>...] - ETA: 13s - loss: 4.0264 - accuracy: 0.1013 589/638 [==========================>...] - ETA: 12s - loss: 4.0257 - accuracy: 0.1015 590/638 [==========================>...] - ETA: 12s - loss: 4.0242 - accuracy: 0.1017 591/638 [==========================>...] - ETA: 12s - loss: 4.0232 - accuracy: 0.1019 592/638 [==========================>...] - ETA: 12s - loss: 4.0224 - accuracy: 0.1021 593/638 [==========================>...] - ETA: 11s - loss: 4.0223 - accuracy: 0.1021 594/638 [==========================>...] - ETA: 11s - loss: 4.0206 - accuracy: 0.1021 595/638 [==========================>...] - ETA: 11s - loss: 4.0202 - accuracy: 0.1023 596/638 [===========================>..] - ETA: 11s - loss: 4.0202 - accuracy: 0.1025 597/638 [===========================>..] - ETA: 10s - loss: 4.0186 - accuracy: 0.1025 598/638 [===========================>..] - ETA: 10s - loss: 4.0172 - accuracy: 0.1026 599/638 [===========================>..] - ETA: 10s - loss: 4.0163 - accuracy: 0.1026 600/638 [===========================>..] - ETA: 10s - loss: 4.0160 - accuracy: 0.1027 601/638 [===========================>..] - ETA: 9s - loss: 4.0139 - accuracy: 0.1028 602/638 [===========================>..] - ETA: 9s - loss: 4.0124 - accuracy: 0.1029 603/638 [===========================>..] - ETA: 9s - loss: 4.0113 - accuracy: 0.1031 604/638 [===========================>..] - ETA: 8s - loss: 4.0109 - accuracy: 0.1031 605/638 [===========================>..] - ETA: 8s - loss: 4.0094 - accuracy: 0.1032 606/638 [===========================>..] - ETA: 8s - loss: 4.0085 - accuracy: 0.1035 607/638 [===========================>..] - ETA: 8s - loss: 4.0085 - accuracy: 0.1035 608/638 [===========================>..] - ETA: 7s - loss: 4.0073 - accuracy: 0.1038 609/638 [===========================>..] - ETA: 7s - loss: 4.0070 - accuracy: 0.1038 610/638 [===========================>..] - ETA: 7s - loss: 4.0054 - accuracy: 0.1039 611/638 [===========================>..] - ETA: 7s - loss: 4.0040 - accuracy: 0.1040 612/638 [===========================>..] - ETA: 6s - loss: 4.0044 - accuracy: 0.1041 613/638 [===========================>..] - ETA: 6s - loss: 4.0036 - accuracy: 0.1043 614/638 [===========================>..] - ETA: 6s - loss: 4.0027 - accuracy: 0.1043 615/638 [===========================>..] - ETA: 6s - loss: 4.0026 - accuracy: 0.1043 616/638 [===========================>..] - ETA: 5s - loss: 4.0011 - accuracy: 0.1043 617/638 [============================>.] - ETA: 5s - loss: 3.9999 - accuracy: 0.1045 618/638 [============================>.] 
- ETA: 5s - loss: 3.9996 - accuracy: 0.1044 619/638 [============================>.] - ETA: 4s - loss: 3.9985 - accuracy: 0.1045 620/638 [============================>.] - ETA: 4s - loss: 3.9991 - accuracy: 0.1045 621/638 [============================>.] - ETA: 4s - loss: 3.9977 - accuracy: 0.1044 622/638 [============================>.] - ETA: 4s - loss: 3.9977 - accuracy: 0.1045 623/638 [============================>.] - ETA: 3s - loss: 3.9977 - accuracy: 0.1046 624/638 [============================>.] - ETA: 3s - loss: 3.9974 - accuracy: 0.1048 625/638 [============================>.] - ETA: 3s - loss: 3.9975 - accuracy: 0.1049 626/638 [============================>.] - ETA: 3s - loss: 3.9975 - accuracy: 0.1048 627/638 [============================>.] - ETA: 2s - loss: 3.9958 - accuracy: 0.1052 628/638 [============================>.] - ETA: 2s - loss: 3.9949 - accuracy: 0.1052 629/638 [============================>.] - ETA: 2s - loss: 3.9937 - accuracy: 0.1053 630/638 [============================>.] - ETA: 2s - loss: 3.9925 - accuracy: 0.1054 631/638 [============================>.] - ETA: 1s - loss: 3.9908 - accuracy: 0.1055 632/638 [============================>.] - ETA: 1s - loss: 3.9903 - accuracy: 0.1056 633/638 [============================>.] - ETA: 1s - loss: 3.9893 - accuracy: 0.1057 634/638 [============================>.] - ETA: 1s - loss: 3.9884 - accuracy: 0.1058 635/638 [============================>.] - ETA: 0s - loss: 3.9872 - accuracy: 0.1059 636/638 [============================>.] - ETA: 0s - loss: 3.9864 - accuracy: 0.1060 637/638 [============================>.2025-04-09 18:19:46.595154: W tensorflow/core/grappler/optimizers/data/auto_shard.cc:784] AUTO sharding policy will apply DATA sharding policy as it failed to apply FILE sharding policy because of the following reason: Found an unshardable source dataset: name: "TensorSliceDataset/_1" +op: "TensorSliceDataset" +input: "Placeholder/_0" +attr { + key: "Toutput_types" + value { + list { + type: DT_STRING + } + } +} +attr { + key: "_cardinality" + value { + i: 5102 + } +} +attr { + key: "is_files" + value { + b: false + } +} +attr { + key: "metadata" + value { + s: "\n\024TensorSliceDataset:7" + } +} +attr { + key: "output_shapes" + value { + list { + shape { + } + } + } +} +attr { + key: "replicate_on_split" + value { + b: false + } +} +experimental_type { + type_id: TFT_PRODUCT + args { + type_id: TFT_DATASET + args { + type_id: TFT_PRODUCT + args { + type_id: TFT_TENSOR + args { + type_id: TFT_STRING + } + } + } + } +} + +] - ETA: 0s - loss: 3.9850 - accuracy: 0.1061 638/638 [==============================] - ETA: 0s - loss: 3.9849 - accuracy: 0.1061 638/638 [==============================] - 214s 307ms/step - loss: 3.9849 - accuracy: 0.1061 - val_loss: 4.0034 - val_accuracy: 0.1015 +Epoch 2/20 + 1/638 [..............................] - ETA: 9:35 - loss: 3.5540 - accuracy: 0.2188 2/638 [..............................] - ETA: 3:07 - loss: 3.5350 - accuracy: 0.1406 3/638 [..............................] - ETA: 3:05 - loss: 3.3892 - accuracy: 0.1354 4/638 [..............................] - ETA: 3:07 - loss: 3.3924 - accuracy: 0.1562 5/638 [..............................] - ETA: 3:04 - loss: 3.4678 - accuracy: 0.1688 6/638 [..............................] - ETA: 2:59 - loss: 3.4290 - accuracy: 0.1823 7/638 [..............................] - ETA: 2:56 - loss: 3.5196 - accuracy: 0.1696 8/638 [..............................] - ETA: 2:54 - loss: 3.4554 - accuracy: 0.1602 9/638 [..............................] 
- ETA: 3:15 - loss: 3.4445 - accuracy: 0.1632 10/638 [..............................] - ETA: 3:11 - loss: 3.4641 - accuracy: 0.1688 11/638 [..............................] - ETA: 3:16 - loss: 3.4603 - accuracy: 0.1676 12/638 [..............................] - ETA: 3:18 - loss: 3.4314 - accuracy: 0.1771 13/638 [..............................] - ETA: 3:14 - loss: 3.4881 - accuracy: 0.1779 14/638 [..............................] - ETA: 3:13 - loss: 3.5081 - accuracy: 0.1741 15/638 [..............................] - ETA: 3:11 - loss: 3.5045 - accuracy: 0.1729 16/638 [..............................] - ETA: 3:08 - loss: 3.5027 - accuracy: 0.1777 17/638 [..............................] - ETA: 3:08 - loss: 3.4930 - accuracy: 0.1820 18/638 [..............................] - ETA: 3:06 - loss: 3.4796 - accuracy: 0.1788 19/638 [..............................] - ETA: 3:04 - loss: 3.4973 - accuracy: 0.1776 20/638 [..............................] - ETA: 3:03 - loss: 3.4821 - accuracy: 0.1781 21/638 [..............................] - ETA: 3:01 - loss: 3.4756 - accuracy: 0.1786 22/638 [>.............................] - ETA: 3:00 - loss: 3.4676 - accuracy: 0.1776 23/638 [>.............................] - ETA: 2:59 - loss: 3.4938 - accuracy: 0.1726 24/638 [>.............................] - ETA: 2:58 - loss: 3.5010 - accuracy: 0.1732 25/638 [>.............................] - ETA: 2:57 - loss: 3.5025 - accuracy: 0.1700 26/638 [>.............................] - ETA: 2:57 - loss: 3.4877 - accuracy: 0.1671 27/638 [>.............................] - ETA: 2:56 - loss: 3.4575 - accuracy: 0.1759 28/638 [>.............................] - ETA: 2:55 - loss: 3.4444 - accuracy: 0.1730 29/638 [>.............................] - ETA: 2:54 - loss: 3.4284 - accuracy: 0.1756 30/638 [>.............................] - ETA: 2:54 - loss: 3.4387 - accuracy: 0.1740 31/638 [>.............................] - ETA: 2:53 - loss: 3.4542 - accuracy: 0.1754 32/638 [>.............................] - ETA: 2:52 - loss: 3.4632 - accuracy: 0.1748 33/638 [>.............................] - ETA: 2:54 - loss: 3.4725 - accuracy: 0.1752 34/638 [>.............................] - ETA: 2:54 - loss: 3.4688 - accuracy: 0.1737 35/638 [>.............................] - ETA: 2:53 - loss: 3.4536 - accuracy: 0.1768 36/638 [>.............................] - ETA: 2:52 - loss: 3.4438 - accuracy: 0.1753 37/638 [>.............................] - ETA: 2:51 - loss: 3.4313 - accuracy: 0.1824 38/638 [>.............................] - ETA: 2:50 - loss: 3.4356 - accuracy: 0.1826 39/638 [>.............................] - ETA: 2:50 - loss: 3.4525 - accuracy: 0.1827 40/638 [>.............................] - ETA: 2:49 - loss: 3.4402 - accuracy: 0.1852 41/638 [>.............................] - ETA: 2:49 - loss: 3.4516 - accuracy: 0.1852 42/638 [>.............................] - ETA: 2:48 - loss: 3.4436 - accuracy: 0.1853 43/638 [=>............................] - ETA: 2:47 - loss: 3.4370 - accuracy: 0.1846 44/638 [=>............................] - ETA: 2:47 - loss: 3.4387 - accuracy: 0.1868 45/638 [=>............................] - ETA: 2:46 - loss: 3.4353 - accuracy: 0.1896 46/638 [=>............................] - ETA: 2:45 - loss: 3.4485 - accuracy: 0.1875 47/638 [=>............................] - ETA: 2:45 - loss: 3.4363 - accuracy: 0.1868 48/638 [=>............................] - ETA: 2:44 - loss: 3.4435 - accuracy: 0.1849 49/638 [=>............................] - ETA: 2:44 - loss: 3.4460 - accuracy: 0.1849 50/638 [=>............................] 
- ETA: 2:44 - loss: 3.4360 - accuracy: 0.1869 51/638 [=>............................] - ETA: 2:44 - loss: 3.4362 - accuracy: 0.1857 52/638 [=>............................] - ETA: 2:44 - loss: 3.4324 - accuracy: 0.1881 53/638 [=>............................] - ETA: 2:43 - loss: 3.4325 - accuracy: 0.1869 54/638 [=>............................] - ETA: 2:43 - loss: 3.4434 - accuracy: 0.1840 55/638 [=>............................] - ETA: 2:43 - loss: 3.4383 - accuracy: 0.1841 56/638 [=>............................] - ETA: 2:42 - loss: 3.4272 - accuracy: 0.1858 57/638 [=>............................] - ETA: 2:42 - loss: 3.4318 - accuracy: 0.1842 58/638 [=>............................] - ETA: 2:42 - loss: 3.4262 - accuracy: 0.1853 59/638 [=>............................] - ETA: 2:42 - loss: 3.4196 - accuracy: 0.1843 60/638 [=>............................] - ETA: 2:42 - loss: 3.4292 - accuracy: 0.1828 61/638 [=>............................] - ETA: 2:41 - loss: 3.4300 - accuracy: 0.1819 62/638 [=>............................] - ETA: 2:42 - loss: 3.4265 - accuracy: 0.1815 63/638 [=>............................] - ETA: 2:42 - loss: 3.4284 - accuracy: 0.1806 64/638 [==>...........................] - ETA: 2:42 - loss: 3.4344 - accuracy: 0.1797 65/638 [==>...........................] - ETA: 2:41 - loss: 3.4470 - accuracy: 0.1798 66/638 [==>...........................] - ETA: 2:41 - loss: 3.4426 - accuracy: 0.1804 67/638 [==>...........................] - ETA: 2:40 - loss: 3.4319 - accuracy: 0.1814 68/638 [==>...........................] - ETA: 2:40 - loss: 3.4274 - accuracy: 0.1824 69/638 [==>...........................] - ETA: 2:40 - loss: 3.4222 - accuracy: 0.1816 70/638 [==>...........................] - ETA: 2:40 - loss: 3.4183 - accuracy: 0.1835 71/638 [==>...........................] - ETA: 2:39 - loss: 3.4153 - accuracy: 0.1818 72/638 [==>...........................] - ETA: 2:39 - loss: 3.4119 - accuracy: 0.1814 73/638 [==>...........................] - ETA: 2:39 - loss: 3.4205 - accuracy: 0.1819 74/638 [==>...........................] - ETA: 2:39 - loss: 3.4235 - accuracy: 0.1824 75/638 [==>...........................] - ETA: 2:38 - loss: 3.4219 - accuracy: 0.1829 76/638 [==>...........................] - ETA: 2:39 - loss: 3.4201 - accuracy: 0.1809 77/638 [==>...........................] - ETA: 2:39 - loss: 3.4234 - accuracy: 0.1802 78/638 [==>...........................] - ETA: 2:38 - loss: 3.4252 - accuracy: 0.1803 79/638 [==>...........................] - ETA: 2:38 - loss: 3.4272 - accuracy: 0.1804 80/638 [==>...........................] - ETA: 2:38 - loss: 3.4285 - accuracy: 0.1809 81/638 [==>...........................] - ETA: 2:37 - loss: 3.4192 - accuracy: 0.1825 82/638 [==>...........................] - ETA: 2:37 - loss: 3.4180 - accuracy: 0.1833 83/638 [==>...........................] - ETA: 2:37 - loss: 3.4117 - accuracy: 0.1834 84/638 [==>...........................] - ETA: 2:36 - loss: 3.4154 - accuracy: 0.1842 85/638 [==>...........................] - ETA: 2:36 - loss: 3.4148 - accuracy: 0.1835 86/638 [===>..........................] - ETA: 2:36 - loss: 3.4138 - accuracy: 0.1831 87/638 [===>..........................] - ETA: 2:35 - loss: 3.4093 - accuracy: 0.1839 88/638 [===>..........................] - ETA: 2:35 - loss: 3.4039 - accuracy: 0.1847 89/638 [===>..........................] - ETA: 2:35 - loss: 3.4032 - accuracy: 0.1847 \ No newline at end of file