{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import asyncio\n", "\n", "from de_quoi_parle_le_monde.storage import Storage\n", "import sparknlp\n", "from sparknlp.pretrained import PretrainedPipeline\n" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "24/03/27 18:32:53 WARN Utils: Your hostname, time resolves to a loopback address: 127.0.1.1; using 192.168.1.91 instead (on interface enp2s0)\n", "24/03/27 18:32:53 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ ":: loading settings :: url = jar:file:/home/theenglishway/Documents/dev/python/le_monde/.venv/lib/python3.11/site-packages/pyspark/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "Ivy Default Cache set to: /home/theenglishway/.ivy2/cache\n", "The jars for the packages stored in: /home/theenglishway/.ivy2/jars\n", "com.johnsnowlabs.nlp#spark-nlp_2.12 added as a dependency\n", ":: resolving dependencies :: org.apache.spark#spark-submit-parent-dc6da606-5704-4c00-9fee-58c5afaaad72;1.0\n", "\tconfs: [default]\n", "\tfound com.johnsnowlabs.nlp#spark-nlp_2.12;5.3.2 in central\n", "\tfound com.typesafe#config;1.4.2 in central\n", "\tfound org.rocksdb#rocksdbjni;6.29.5 in central\n", "\tfound com.amazonaws#aws-java-sdk-s3;1.12.500 in central\n", "\tfound com.amazonaws#aws-java-sdk-kms;1.12.500 in central\n", "\tfound com.amazonaws#aws-java-sdk-core;1.12.500 in central\n", "\tfound commons-logging#commons-logging;1.1.3 in central\n", "\tfound commons-codec#commons-codec;1.15 in central\n", "\tfound org.apache.httpcomponents#httpclient;4.5.13 in central\n", "\tfound org.apache.httpcomponents#httpcore;4.4.13 in central\n", "\tfound software.amazon.ion#ion-java;1.0.2 in central\n", "\tfound joda-time#joda-time;2.8.1 in 
central\n", "\tfound com.amazonaws#jmespath-java;1.12.500 in central\n", "\tfound com.github.universal-automata#liblevenshtein;3.0.0 in central\n", "\tfound com.google.protobuf#protobuf-java-util;3.0.0-beta-3 in central\n", "\tfound com.google.protobuf#protobuf-java;3.0.0-beta-3 in central\n", "\tfound com.google.code.gson#gson;2.3 in central\n", "\tfound it.unimi.dsi#fastutil;7.0.12 in central\n", "\tfound org.projectlombok#lombok;1.16.8 in central\n", "\tfound com.google.cloud#google-cloud-storage;2.20.1 in central\n", "\tfound com.google.guava#guava;31.1-jre in central\n", "\tfound com.google.guava#failureaccess;1.0.1 in central\n", "\tfound com.google.guava#listenablefuture;9999.0-empty-to-avoid-conflict-with-guava in central\n", "\tfound com.google.errorprone#error_prone_annotations;2.18.0 in central\n", "\tfound com.google.j2objc#j2objc-annotations;1.3 in central\n", "\tfound com.google.http-client#google-http-client;1.43.0 in central\n", "\tfound io.opencensus#opencensus-contrib-http-util;0.31.1 in central\n", "\tfound com.google.http-client#google-http-client-jackson2;1.43.0 in central\n", "\tfound com.google.http-client#google-http-client-gson;1.43.0 in central\n", "\tfound com.google.api-client#google-api-client;2.2.0 in central\n", "\tfound com.google.oauth-client#google-oauth-client;1.34.1 in central\n", "\tfound com.google.http-client#google-http-client-apache-v2;1.43.0 in central\n", "\tfound com.google.apis#google-api-services-storage;v1-rev20220705-2.0.0 in central\n", "\tfound com.google.code.gson#gson;2.10.1 in central\n", "\tfound com.google.cloud#google-cloud-core;2.12.0 in central\n", "\tfound io.grpc#grpc-context;1.53.0 in central\n", "\tfound com.google.auto.value#auto-value-annotations;1.10.1 in central\n", "\tfound com.google.auto.value#auto-value;1.10.1 in central\n", "\tfound javax.annotation#javax.annotation-api;1.3.2 in central\n", "\tfound com.google.cloud#google-cloud-core-http;2.12.0 in central\n", "\tfound 
com.google.http-client#google-http-client-appengine;1.43.0 in central\n", "\tfound com.google.api#gax-httpjson;0.108.2 in central\n", "\tfound com.google.cloud#google-cloud-core-grpc;2.12.0 in central\n", "\tfound io.grpc#grpc-alts;1.53.0 in central\n", "\tfound io.grpc#grpc-grpclb;1.53.0 in central\n", "\tfound org.conscrypt#conscrypt-openjdk-uber;2.5.2 in central\n", "\tfound io.grpc#grpc-auth;1.53.0 in central\n", "\tfound io.grpc#grpc-protobuf;1.53.0 in central\n", "\tfound io.grpc#grpc-protobuf-lite;1.53.0 in central\n", "\tfound io.grpc#grpc-core;1.53.0 in central\n", "\tfound com.google.api#gax;2.23.2 in central\n", "\tfound com.google.api#gax-grpc;2.23.2 in central\n", "\tfound com.google.auth#google-auth-library-credentials;1.16.0 in central\n", "\tfound com.google.auth#google-auth-library-oauth2-http;1.16.0 in central\n", "\tfound com.google.api#api-common;2.6.2 in central\n", "\tfound io.opencensus#opencensus-api;0.31.1 in central\n", "\tfound com.google.api.grpc#proto-google-iam-v1;1.9.2 in central\n", "\tfound com.google.protobuf#protobuf-java;3.21.12 in central\n", "\tfound com.google.protobuf#protobuf-java-util;3.21.12 in central\n", "\tfound com.google.api.grpc#proto-google-common-protos;2.14.2 in central\n", "\tfound org.threeten#threetenbp;1.6.5 in central\n", "\tfound com.google.api.grpc#proto-google-cloud-storage-v2;2.20.1-alpha in central\n", "\tfound com.google.api.grpc#grpc-google-cloud-storage-v2;2.20.1-alpha in central\n", "\tfound com.google.api.grpc#gapic-google-cloud-storage-v2;2.20.1-alpha in central\n", "\tfound com.google.code.findbugs#jsr305;3.0.2 in central\n", "\tfound io.grpc#grpc-api;1.53.0 in central\n", "\tfound io.grpc#grpc-stub;1.53.0 in central\n", "\tfound org.checkerframework#checker-qual;3.31.0 in central\n", "\tfound io.perfmark#perfmark-api;0.26.0 in central\n", "\tfound com.google.android#annotations;4.1.1.4 in central\n", "\tfound org.codehaus.mojo#animal-sniffer-annotations;1.22 in central\n", "\tfound 
io.opencensus#opencensus-proto;0.2.0 in central\n", "\tfound io.grpc#grpc-services;1.53.0 in central\n", "\tfound com.google.re2j#re2j;1.6 in central\n", "\tfound io.grpc#grpc-netty-shaded;1.53.0 in central\n", "\tfound io.grpc#grpc-googleapis;1.53.0 in central\n", "\tfound io.grpc#grpc-xds;1.53.0 in central\n", "\tfound com.navigamez#greex;1.0 in central\n", "\tfound dk.brics.automaton#automaton;1.11-8 in central\n", "\tfound com.johnsnowlabs.nlp#tensorflow-cpu_2.12;0.4.4 in central\n", "\tfound com.microsoft.onnxruntime#onnxruntime;1.17.0 in central\n", ":: resolution report :: resolve 5622ms :: artifacts dl 120ms\n", "\t:: modules in use:\n", "\tcom.amazonaws#aws-java-sdk-core;1.12.500 from central in [default]\n", "\tcom.amazonaws#aws-java-sdk-kms;1.12.500 from central in [default]\n", "\tcom.amazonaws#aws-java-sdk-s3;1.12.500 from central in [default]\n", "\tcom.amazonaws#jmespath-java;1.12.500 from central in [default]\n", "\tcom.github.universal-automata#liblevenshtein;3.0.0 from central in [default]\n", "\tcom.google.android#annotations;4.1.1.4 from central in [default]\n", "\tcom.google.api#api-common;2.6.2 from central in [default]\n", "\tcom.google.api#gax;2.23.2 from central in [default]\n", "\tcom.google.api#gax-grpc;2.23.2 from central in [default]\n", "\tcom.google.api#gax-httpjson;0.108.2 from central in [default]\n", "\tcom.google.api-client#google-api-client;2.2.0 from central in [default]\n", "\tcom.google.api.grpc#gapic-google-cloud-storage-v2;2.20.1-alpha from central in [default]\n", "\tcom.google.api.grpc#grpc-google-cloud-storage-v2;2.20.1-alpha from central in [default]\n", "\tcom.google.api.grpc#proto-google-cloud-storage-v2;2.20.1-alpha from central in [default]\n", "\tcom.google.api.grpc#proto-google-common-protos;2.14.2 from central in [default]\n", "\tcom.google.api.grpc#proto-google-iam-v1;1.9.2 from central in [default]\n", "\tcom.google.apis#google-api-services-storage;v1-rev20220705-2.0.0 from central in [default]\n", 
"\tcom.google.auth#google-auth-library-credentials;1.16.0 from central in [default]\n", "\tcom.google.auth#google-auth-library-oauth2-http;1.16.0 from central in [default]\n", "\tcom.google.auto.value#auto-value;1.10.1 from central in [default]\n", "\tcom.google.auto.value#auto-value-annotations;1.10.1 from central in [default]\n", "\tcom.google.cloud#google-cloud-core;2.12.0 from central in [default]\n", "\tcom.google.cloud#google-cloud-core-grpc;2.12.0 from central in [default]\n", "\tcom.google.cloud#google-cloud-core-http;2.12.0 from central in [default]\n", "\tcom.google.cloud#google-cloud-storage;2.20.1 from central in [default]\n", "\tcom.google.code.findbugs#jsr305;3.0.2 from central in [default]\n", "\tcom.google.code.gson#gson;2.10.1 from central in [default]\n", "\tcom.google.errorprone#error_prone_annotations;2.18.0 from central in [default]\n", "\tcom.google.guava#failureaccess;1.0.1 from central in [default]\n", "\tcom.google.guava#guava;31.1-jre from central in [default]\n", "\tcom.google.guava#listenablefuture;9999.0-empty-to-avoid-conflict-with-guava from central in [default]\n", "\tcom.google.http-client#google-http-client;1.43.0 from central in [default]\n", "\tcom.google.http-client#google-http-client-apache-v2;1.43.0 from central in [default]\n", "\tcom.google.http-client#google-http-client-appengine;1.43.0 from central in [default]\n", "\tcom.google.http-client#google-http-client-gson;1.43.0 from central in [default]\n", "\tcom.google.http-client#google-http-client-jackson2;1.43.0 from central in [default]\n", "\tcom.google.j2objc#j2objc-annotations;1.3 from central in [default]\n", "\tcom.google.oauth-client#google-oauth-client;1.34.1 from central in [default]\n", "\tcom.google.protobuf#protobuf-java;3.21.12 from central in [default]\n", "\tcom.google.protobuf#protobuf-java-util;3.21.12 from central in [default]\n", "\tcom.google.re2j#re2j;1.6 from central in [default]\n", "\tcom.johnsnowlabs.nlp#spark-nlp_2.12;5.3.2 from central in 
[default]\n", "\tcom.johnsnowlabs.nlp#tensorflow-cpu_2.12;0.4.4 from central in [default]\n", "\tcom.microsoft.onnxruntime#onnxruntime;1.17.0 from central in [default]\n", "\tcom.navigamez#greex;1.0 from central in [default]\n", "\tcom.typesafe#config;1.4.2 from central in [default]\n", "\tcommons-codec#commons-codec;1.15 from central in [default]\n", "\tcommons-logging#commons-logging;1.1.3 from central in [default]\n", "\tdk.brics.automaton#automaton;1.11-8 from central in [default]\n", "\tio.grpc#grpc-alts;1.53.0 from central in [default]\n", "\tio.grpc#grpc-api;1.53.0 from central in [default]\n", "\tio.grpc#grpc-auth;1.53.0 from central in [default]\n", "\tio.grpc#grpc-context;1.53.0 from central in [default]\n", "\tio.grpc#grpc-core;1.53.0 from central in [default]\n", "\tio.grpc#grpc-googleapis;1.53.0 from central in [default]\n", "\tio.grpc#grpc-grpclb;1.53.0 from central in [default]\n", "\tio.grpc#grpc-netty-shaded;1.53.0 from central in [default]\n", "\tio.grpc#grpc-protobuf;1.53.0 from central in [default]\n", "\tio.grpc#grpc-protobuf-lite;1.53.0 from central in [default]\n", "\tio.grpc#grpc-services;1.53.0 from central in [default]\n", "\tio.grpc#grpc-stub;1.53.0 from central in [default]\n", "\tio.grpc#grpc-xds;1.53.0 from central in [default]\n", "\tio.opencensus#opencensus-api;0.31.1 from central in [default]\n", "\tio.opencensus#opencensus-contrib-http-util;0.31.1 from central in [default]\n", "\tio.opencensus#opencensus-proto;0.2.0 from central in [default]\n", "\tio.perfmark#perfmark-api;0.26.0 from central in [default]\n", "\tit.unimi.dsi#fastutil;7.0.12 from central in [default]\n", "\tjavax.annotation#javax.annotation-api;1.3.2 from central in [default]\n", "\tjoda-time#joda-time;2.8.1 from central in [default]\n", "\torg.apache.httpcomponents#httpclient;4.5.13 from central in [default]\n", "\torg.apache.httpcomponents#httpcore;4.4.13 from central in [default]\n", "\torg.checkerframework#checker-qual;3.31.0 from central in [default]\n", 
"\torg.codehaus.mojo#animal-sniffer-annotations;1.22 from central in [default]\n", "\torg.conscrypt#conscrypt-openjdk-uber;2.5.2 from central in [default]\n", "\torg.projectlombok#lombok;1.16.8 from central in [default]\n", "\torg.rocksdb#rocksdbjni;6.29.5 from central in [default]\n", "\torg.threeten#threetenbp;1.6.5 from central in [default]\n", "\tsoftware.amazon.ion#ion-java;1.0.2 from central in [default]\n", "\t:: evicted modules:\n", "\tcommons-logging#commons-logging;1.2 by [commons-logging#commons-logging;1.1.3] in [default]\n", "\tcommons-codec#commons-codec;1.11 by [commons-codec#commons-codec;1.15] in [default]\n", "\tcom.google.protobuf#protobuf-java-util;3.0.0-beta-3 by [com.google.protobuf#protobuf-java-util;3.21.12] in [default]\n", "\tcom.google.protobuf#protobuf-java;3.0.0-beta-3 by [com.google.protobuf#protobuf-java;3.21.12] in [default]\n", "\tcom.google.code.gson#gson;2.3 by [com.google.code.gson#gson;2.10.1] in [default]\n", "\t---------------------------------------------------------------------\n", "\t| | modules || artifacts |\n", "\t| conf | number| search|dwnlded|evicted|| number|dwnlded|\n", "\t---------------------------------------------------------------------\n", "\t| default | 83 | 8 | 8 | 5 || 78 | 0 |\n", "\t---------------------------------------------------------------------\n", "\n", ":: problems summary ::\n", ":::: ERRORS\n", "\tunknown resolver null\n", "\n", "\tunknown resolver null\n", "\n", "\tunknown resolver null\n", "\n", "\tunknown resolver null\n", "\n", "\tunknown resolver null\n", "\n", "\tunknown resolver null\n", "\n", "\tunknown resolver null\n", "\n", "\tunknown resolver null\n", "\n", "\n", ":: USE VERBOSE OR DEBUG MESSAGE LEVEL FOR MORE DETAILS\n", ":: retrieving :: org.apache.spark#spark-submit-parent-dc6da606-5704-4c00-9fee-58c5afaaad72\n", "\tconfs: [default]\n", "\t0 artifacts copied, 78 already retrieved (0kB/35ms)\n", "24/03/27 18:33:00 WARN NativeCodeLoader: Unable to load native-hadoop library for 
your platform... using builtin-java classes where applicable\n", "Setting default log level to \"WARN\".\n", "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n" ] } ], "source": [ "storage = await Storage.create()\n", "spark = sparknlp.start()" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[(1, 'https://francetvinfo.fr'),\n", " (2, 'https://cnews.fr'),\n", " (5, 'https://lemonde.fr')]" ] }, "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "await storage.select_from(\"sites\")" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "phrases = [[v[5]] for v in await storage.select_from(\"top_articles_view\")]" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[[\"Guerre en Ukraine : ce qu'il faut retenir de l'interview d'Emmanuel Macron sur France 2 et TF1\"],\n", " [\"Médias : Vincent Bolloré, Cyril Hanouna, Pascal Praud... 
Ce qu'il faut retenir de leurs déclarations devant la commission d'enquête de l'Assemblée nationale\"],\n", " [\"Russie : cinq choses à savoir sur l'élection présidentielle, que Vladimir Poutine est assuré de remporter\"],\n", " [\"La ministre des Sports, Amélie Oudéa-Castéra, a été mise en examen pour diffamation envers l'ex-patron du foot français Noël Le Graët\"],\n", " ['Impôts : voici les dates limites pour remplir sa déclaration de revenus en ligne en 2024'],\n", " ['\"Deepfakes\" pornographiques : pourquoi la lutte contre les images générées par intelligence artificielle s\\'annonce difficile'],\n", " ['Sciences Po Paris : \"Il n\\'y a pas eu de propos antisémites\", assure une membre du comité Sciences Po pour la Palestine'],\n", " ['\"Ils vont chez les gens avec des hommes armés\"\\xa0: dans les zones occupées, les Ukrainiens sont \"invités\" à voter pour la présidentielle russe'],\n", " ['Interview d\\'Emmanuel Macron : \"Le président fait peur\" et souffle \"sur les braises d’un potentiel conflit mondial à des fins électorales\" selon les oppositions'],\n", " [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"],\n", " [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"],\n", " ['Pascal Praud : «Honte à Mathias Vicherat et Sylvie Retailleau»'],\n", " ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " [\"JO 2024 : les habitants de HLM s'exposeront à une expulsion et 9.000 euros d’amende en cas de sous-location\"],\n", " ['EuroMillions : voici les 13 numéros et 4 étoiles qui sortent le plus souvent'],\n", " [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"],\n", " ['Pascal Praud : «Honte à Mathias Vicherat et Sylvie Retailleau»'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: la Russie et l’Ukraine disent avoir déjoué 
plusieurs attaques de drones dans la nuit'],\n", " ['En direct, présidentielle en Russie\\xa0: Vladimir Poutine en route vers une réélection, après deux ans de guerre en Ukraine'],\n", " ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: nouvelles attaques de drones ukrainiens contre les régions de Belgorod et de Koursk, dans le sud de la Russie'],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " ['EuroMillions : voici les 13 numéros et 4 étoiles qui sortent le plus souvent'],\n", " ['Le journaliste Jean-François Achilli est suspendu de l’antenne de Franceinfo «\\xa0à titre conservatoire\\xa0»'],\n", " ['Le journaliste Jean-François Achilli est suspendu de l’antenne de Franceinfo «\\xa0à titre conservatoire\\xa0»'],\n", " [\"Jean-Michel Cohen : «Je suis millionnaire... Ceux qui ne sont pas contents : 'bossez autant que moi !'», estime le nutritionniste chez Jordan de Luxe\"],\n", " ['La dessinatrice Coco menacée de mort pour un dessin sur la famine à Gaza'],\n", " ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)'],\n", " ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " ['En direct, interview d’Emmanuel Macron\\xa0: «\\xa0Nous n’aurons plus de sécurité\\xa0» en Europe si la Russie «\\xa0venait à gagner\\xa0» en Ukraine'],\n", " ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " ['Interview d’Emmanuel Macron\\xa0: «\\xa0Nous n’aurons plus de sécurité\\xa0» en Europe si la Russie «\\xa0venait à gagner\\xa0» en Ukraine'],\n", " ['L’étrange genèse du livre de Jordan Bardella'],\n", " ['«J’ai besoin de ma voiture» : plusieurs véhicules désossés dans un parking parisien, les propriétaires atterrés'],\n", " ['En direct, guerre 
en\\xa0Ukraine\\xa0: des combats signalés entre soldats russes et groupes pro-ukrainiens dans les régions russes de Belgorod et Koursk'],\n", " ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " ['A Aubervilliers, une voiture de police a percuté et tué un jeune homme en scooter après une embardée, selon le parquet'],\n", " [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"],\n", " ['A Aubervilliers, une voiture de police a percuté et tué un jeune homme en scooter après une embardée, selon le parquet'],\n", " ['L’argument des accords de Munich, utilisé par Emmanuel Macron dans le contexte de la guerre en Ukraine, a ses limites historiques'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: les combattants russes pro-ukrainiens disent lancer «\\xa0des frappes massives\\xa0» dans les régions de Koursk et de Belgorod'],\n", " ['La dessinatrice Coco menacée de mort pour un dessin sur la famine à Gaza'],\n", " [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"],\n", " ['Aux Etats-Unis, le chef de file démocrate au Sénat, Chuck Schumer, désigne Benyamin Nétanyahou comme un «\\xa0obstacle pour la paix\\xa0»'],\n", " ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)'],\n", " ['Aux Etats-Unis, le chef de file démocrate au Sénat, Chuck Schumer, désigne Benyamin Nétanyahou comme un «\\xa0obstacle pour la paix\\xa0»'],\n", " ['IVG\\xa0: pour Vincent Bolloré, auditionné en commission d’enquête, deux «\\xa0libertés\\xa0» se «\\xa0heurtent\\xa0», dont celle «\\xa0des enfants à vivre\\xa0»'],\n", " ['Guerre en Ukraine\\xa0: la métamorphose d’Emmanuel Macron, colombe devenue faucon'],\n", " ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " ['L’étrange genèse du livre de Jordan Bardella'],\n", " ['«Marée du siècle» à 
Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)'],\n", " ['Algérie\\xa0: Antoine de Maximy a eu du mal à dormir chez eux'],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " ['Algérie\\xa0: Antoine de Maximy a eu du mal à dormir chez eux'],\n", " ['Marco Mouly, «\\xa0roi de l’arnaque\\xa0» à la «\\xa0taxe carbone\\xa0», se rend au tribunal en vue de son incarcération'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: bombardements à Belgorod, mort de deux civils... Le point sur la situation'],\n", " ['Tirage au sort de la Ligue des champions : le PSG face au FC Barcelone en quarts de finale... Suivez et commentez avec nous'],\n", " ['Le journaliste Jean-François Achilli est suspendu de l’antenne de Franceinfo «\\xa0à titre conservatoire\\xa0»'],\n", " [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"],\n", " [\"Affaire Gérard Miller : une alerte sur son comportement avait été lancée auprès de la direction de l'université Paris 8\"],\n", " ['IVG\\xa0: pour Vincent Bolloré, auditionné en commission d’enquête, deux «\\xa0libertés\\xa0» se «\\xa0heurtent\\xa0», dont celle «\\xa0des enfants à vivre\\xa0»'],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " ['Enzo, 15\\xa0ans, poignardé à mort à l’été\\xa02023\\xa0: les ressorts politiques d’un fait divers'],\n", " ['«J’ai besoin de ma voiture» : plusieurs véhicules désossés dans un parking parisien, les propriétaires atterrés'],\n", " ['BFM-TV et RMC vendus à l’armateur Rodolphe Saadé'],\n", " ['«\\xa0Dubaï Papers\\xa0»\\xa0: une famille française qui avait caché plus de 100\\xa0millions d’euros à l’étranger condamnée pour fraude fiscale'],\n", " ['Le journaliste Jean-François Achilli est suspendu de l’antenne de Franceinfo 
«\\xa0à titre conservatoire\\xa0»'],\n", " [\"Guerre en Ukraine : ce qu'il faut retenir de l'interview d'Emmanuel Macron sur France 2 et TF1\"],\n", " ['Plusieurs stars du sport français, dont Teddy Riner ou Kylian Mbappé, écartées par les critères de désignation des porte-drapeaux des JO'],\n", " ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)'],\n", " ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie'],\n", " [\"On vous explique la polémique autour d'une mobilisation pro-palestinienne à Sciences Po Paris\"],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " ['«\\xa0Dubaï Papers\\xa0»\\xa0: une famille française qui avait caché plus de 100\\xa0millions d’euros à l’étranger condamnée pour fraude fiscale'],\n", " ['«J’ai besoin de ma voiture» : plusieurs véhicules désossés dans un parking parisien, les propriétaires atterrés'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: «\\xa0D’un point de\\xa0vue militaro-technique, nous sommes évidemment prêts\\xa0» à\\xa0une guerre nucléaire, rappelle Vladimir Poutine'],\n", " ['Pour son troisième vol, le Starship se\\xa0désintègre lors de\\xa0sa\\xa0rentrée dans l’atmosphère'],\n", " ['Enzo, 15\\xa0ans, poignardé à mort à l’été\\xa02023\\xa0: les ressorts politiques d’un fait divers'],\n", " ['«\\xa0La taxe proposée par la Commission européenne rapporterait chaque année jusqu’à 57\\xa0milliards d’euros\\xa0»'],\n", " [\"La ministre des Sports, Amélie Oudéa-Castéra, a été mise en examen pour diffamation envers l'ex-patron du foot français Noël Le Graët\"],\n", " ['Guerre en Ukraine\\xa0: la métamorphose d’Emmanuel Macron, colombe devenue faucon'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: Poutine accuse Kiev d’attaquer les régions russes de Belgorod et\\xa0Koursk pour «\\xa0saper\\xa0» la\\xa0présidentielle russe'],\n", " 
[\"Médias : Vincent Bolloré, Cyril Hanouna, Pascal Praud... Ce qu'il faut retenir de leurs déclarations devant la commission d'enquête de l'Assemblée nationale\"],\n", " ['A Aubervilliers, une voiture de police a percuté et tué un jeune homme en scooter après une embardée, selon le parquet'],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " [\"Seine-Saint-Denis : collision entre deux jeunes à scooter et une voiture de police après un refus d'obtempérer, le conducteur est mort\"],\n", " ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie'],\n", " ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " ['Marco Mouly, «\\xa0roi de l’arnaque\\xa0» à la «\\xa0taxe carbone\\xa0», se rend au tribunal en vue de son incarcération'],\n", " ['La situation des finances publiques en France est «\\xa0préoccupante\\xa0», juge la Cour des comptes'],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " ['A la veille de la présidentielle, le sud de la Russie visé par des bombardements et des incursions armées'],\n", " [\"Guerre en Ukraine : ce qu'il faut retenir de l'interview d'Emmanuel Macron sur France 2 et TF1\"],\n", " ['Pour son troisième vol, le Starship se\\xa0désintègre lors de\\xa0sa\\xa0rentrée dans l’atmosphère'],\n", " ['Intelligence artificielle\\xa0: un plan d’action pour placer la France «\\xa0à la pointe\\xa0»'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: Poutine accuse Kiev d’attaquer les régions russes de Belgorod et\\xa0Koursk pour «\\xa0saper\\xa0» la\\xa0présidentielle russe'],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " [\"Stéphane Plaza sera jugé 
devant le tribunal correctionnel à la fin de l'été 2024\"],\n", " ['Plusieurs stars du sport français, dont Teddy Riner ou Kylian Mbappé, écartées par les critères de désignation des porte-drapeaux des JO'],\n", " ['L’argument des accords de Munich, utilisé par Emmanuel Macron dans le contexte de la guerre en Ukraine, a ses limites historiques'],\n", " [\"Russie : cinq choses à savoir sur l'élection présidentielle, que Vladimir Poutine est assuré de remporter\"],\n", " ['Le tueur en série \"le Grêlé\" avait participé à l\\'émission \"Tout le monde veut prendre sa place\" en 2019'],\n", " ['Sciences Po s’embrase après une mobilisation propalestinienne, des insultes entendues et des versions contradictoires'],\n", " ['«J’ai besoin de ma voiture» : plusieurs véhicules désossés dans un parking parisien, les propriétaires atterrés'],\n", " ['Devant les députés, Vincent Bolloré, président du groupe Canal+, assume sa foi et récuse tout \"projet idéologique\"'],\n", " [\"Affaire Gérard Miller : une alerte sur son comportement avait été lancée auprès de la direction de l'université Paris 8\"],\n", " [\"Nom identique, amis en commun... 
assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " ['Plusieurs stars du sport français, dont Teddy Riner ou Kylian Mbappé, écartées par les critères de désignation des porte-drapeaux des JO'],\n", " ['Madeleine Chapsal, journaliste et écrivaine, est morte à l’âge de 98\\xa0ans'],\n", " ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: les sénateurs votent en faveur de l’accord bilatéral à une large majorité'],\n", " [\"La ministre des Sports, Amélie Oudéa-Castéra, a été mise en examen pour diffamation envers l'ex-patron du foot français Noël Le Graët\"],\n", " ['L’argument des accords de Munich, utilisé par Emmanuel Macron dans le contexte de la guerre en Ukraine, a ses limites historiques'],\n", " [\"Nom identique, amis en commun... assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " ['Kate Middleton présente ses excuses après la publication d’une photo retouchée de la princesse et de ses enfants'],\n", " ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " ['\"Nous ne pouvons plus fermer les yeux sur ce qu\\'il se passe\" : passe d\\'armes entre la députée Sandrine Rousseau et la ministre Sarah El Haïry sur l\\'Aide sociale à l\\'enfance'],\n", " ['Présidentielle en Russie : Vladimir Poutine demande aux électeurs de se rendre aux urnes pour \"décider de l\\'avenir de la patrie\"'],\n", " ['Emmanuel Macron\\xa0: «\\xa0Nous n’aurons plus de sécurité\\xa0» en Europe si la Russie «\\xa0venait à gagner\\xa0» en Ukraine'],\n", " ['Etats-Unis\\xa0: le duel entre Joe Biden et Donald Trump pour l’élection présidentielle est désormais officiel'],\n", " [\"Médias : le groupe CMA CGM rachète BFMTV et RMC pour 1,55 milliard d'euros\"],\n", " ['\"Nous ne pouvons plus fermer les yeux sur 
ce qu\\'il se passe\" : passe d\\'armes entre la députée Sandrine Rousseau et la ministre Sarah El Haïry sur l\\'Aide sociale à l\\'enfance'],\n", " ['Elections européennes 2024\\xa0: la réhabilitation de Thierry Mariani, un proche du Kremlin devenu voix du RN'],\n", " ['\"Deepfakes\" pornographiques : pourquoi la lutte contre les images générées par intelligence artificielle s\\'annonce difficile'],\n", " ['\"Deepfakes\" pornographiques : pourquoi la lutte contre les images générées par intelligence artificielle s\\'annonce difficile'],\n", " ['Polémique sur une photo trafiquée de Kate Middleton : «La presse est en train de se retourner contre elle (…), cela pourrait devenir dangereux», estime le spécialiste royauté Bertrand Deckers'],\n", " ['«\\xa0La taxe sur les transactions financières proposée par la Commission européenne rapporterait chaque année jusqu’à 57 milliards d’euros\\xa0»'],\n", " ['Punaises de\\xa0lit et\\xa0immigration\\xa0: l’Arcom met en\\xa0garde la\\xa0chaîne CNews'],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " [\"Nom identique, amis en commun... assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: des unités russes pro ukrainiennes affirment mener des attaques à Koursk et Belgorod, malgré les démentis de Moscou'],\n", " [\"Nom identique, amis en commun... 
assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie'],\n", " ['La dessinatrice Coco menacée de mort pour un dessin sur la famine à Gaza'],\n", " ['Impôts : voici les dates limites pour remplir sa déclaration de revenus en ligne en 2024'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: la production réduite de moitié à la raffinerie de Kstovo, endommagée par des drones, selon Reuters'],\n", " ['Etats-Unis\\xa0: le duel entre Joe Biden et Donald Trump pour l’élection présidentielle est désormais officiel'],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " ['«\\xa0Anatomie d’une chute\\xa0» poursuit son incroyable parcours avec l’Oscar du meilleur scénario original'],\n", " [\"Nom identique, amis en commun... assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " ['Guerre en Ukraine : Vladimir Poutine accuse Kiev d\\'attaquer les régions russes pour \"tenter d\\'empêcher\" la présidentielle'],\n", " ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie'],\n", " ['Ukraine : en assumant une \"ambiguïté stratégique\", Emmanuel Macron \"a permis une prise de conscience de l\\'ampleur du sujet\", selon Jean-Yves Le Drian'],\n", " ['A Aubervilliers, une voiture de police a percuté et tué un jeune homme en scooter après une embardée, selon le parquet'],\n", " [\"Nom identique, amis en commun... 
assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " ['A l’Assemblée, l’accord avec l’Ukraine approuvé, Gabriel Attal accuse le RN d’être «\\xa0pro-Poutine\\xa0»'],\n", " ['\"Ils vont chez les gens avec des hommes armés\"\\xa0: dans les zones occupées, les Ukrainiens sont \"invités\" à voter pour la présidentielle russe'],\n", " ['Guerre en Ukraine\\xa0: des milices russes ont attaqué leur propre pays'],\n", " ['Lisa, Louka, Mathéïs, la triste chronique de trois infanticides\\xa0annoncés'],\n", " ['L\\'Arcom met en garde la chaîne CNews après une séquence de \"L\\'Heure des Pros\" faisant le lien entre immigration et punaises de lit'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: au moins dix régions russes attaquées, selon l’agence TASS'],\n", " [\"Nom identique, amis en commun... assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " ['\"On continue d\\'aller dans le mur, même si on y va en Tesla\" : dans l\\'Allier, un projet de mine de lithium révèle le fossé entre deux visions de l\\'écologie'],\n", " ['Étudiante juive refusée dans un amphithéâtre : \"Cette personne nous a filmés de manière ciblée\", explique un membre du Comité Palestine'],\n", " [\"Cyberattaques contre l'État : qui est Anonymous Sudan qui revendique un piratage massif d'une «intensité inédite» ?\"],\n", " ['Sciences Po s’embrase après une mobilisation propalestinienne, des insultes entendues et des versions contradictoires'],\n", " ['Guerre en Ukraine\\xa0: entre Jordan Bardella et Marine Le Pen, une différence de forme'],\n", " [\"Nom identique, amis en commun... 
assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " ['Alerte aux crues, vagues-submersion\\xa0: deux enfants de 4\\xa0et 13\\xa0ans portés disparus dans le Gard, cinq\\xa0corps retrouvés'],\n", " ['Polémique sur une photo trafiquée de Kate Middleton : «La presse est en train de se retourner contre elle (…), cela pourrait devenir dangereux», estime le spécialiste royauté Bertrand Deckers'],\n", " ['Escroquerie à la taxe carbone\\xa0: \"Je me rends\", déclare Marco Mouly au tribunal en vue de son incarcération'],\n", " ['L’argument des accords de Munich, utilisé par Emmanuel Macron dans le contexte de la guerre en Ukraine, a ses limites historiques'],\n", " ['Guerre en Ukraine : \"Nous ne devons pas laisser la Russie gagner\", martèle Emmanuel Macron'],\n", " [\"Nom identique, amis en commun... assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " ['En direct, guerre en Ukraine\\xa0: les premiers avions de chasse F-16 ukrainiens devraient être en service en juillet'],\n", " ['L\\'Arcom met en garde la chaîne CNews après une séquence de \"L\\'Heure des Pros\" faisant le lien entre immigration et punaises de lit'],\n", " ['D’anciens élèves de l’établissement catholique privé Notre-Dame de Bétharram dénoncent un «\\xa0régime de la terreur\\xa0»'],\n", " ['A l’Assemblée, l’accord avec l’Ukraine approuvé, Gabriel Attal accuse le RN d’être «\\xa0pro-Poutine\\xa0»'],\n", " [\"Cyberattaques contre l'État : qui est Anonymous Sudan qui revendique un piratage massif d'une «intensité inédite» ?\"],\n", " ['Un Boeing 787 de Latam Airlines rencontre un «\\xa0problème technique\\xa0» au-dessus de la Nouvelle-Zélande\\xa0; cinquante personnes blessées'],\n", " ['Polémique sur une photo trafiquée de Kate Middleton : «La presse est en train de se retourner contre elle (…), cela pourrait 
devenir dangereux», estime le spécialiste royauté Bertrand Deckers'],\n", " ['Guerre en Ukraine\\xa0: des milices russes ont attaqué leur propre pays'],\n", " ['Escroquerie à la taxe carbone\\xa0: \"Je me rends\", déclare Marco Mouly au tribunal en vue de son incarcération'],\n", " ['Espace : revivez le décollage réussi de Starship, avant la perte du vaisseau lors de son troisième vol test en redescendant vers la Terre'],\n", " ['En direct, guerre en Ukraine\\xa0: un soutien direct de l’OTAN à l’Ukraine ne serait pas contraire aux règles internationales, selon le président tchèque'],\n", " ['Lisa, Louka, Mathéïs, la triste chronique de trois infanticides\\xa0annoncés'],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " ['Débat sur le soutien à l\\'Ukraine : Marine Le Pen dénonce \"les annonces guerrières d\\'Emmanuel Macron\" sur un possible envoi de troupes au sol'],\n", " ['L’amiral Philippe de Gaulle, fils du Général, est mort'],\n", " ['Les salariés en arrêt-maladie ont désormais droit à quatre semaines de congés payés'],\n", " ['Criminalité : un quartier de Rennes traumatisé après une nuit de fusillade'],\n", " [\"Nom identique, amis en commun... assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " ['Les attaques du Hamas en Israël, un «\\xa0acte de résistance\\xa0»\\xa0? 
La philosophe Judith Butler ravive la polémique à gauche'],\n", " ['Adaptation au réchauffement climatique : six choses à retenir du rapport de la Cour des comptes'],\n", " ['D’anciens élèves de l’établissement catholique privé Notre-Dame de Bétharram dénoncent un «\\xa0régime de la terreur\\xa0»'],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " ['\"C\\'est trop tard pour faire autrement\"\\xa0: la majorité divisée sur la stratégie anti-RN pour les élections européennes'],\n", " ['\"On continue d\\'aller dans le mur, même si on y va en Tesla\" : dans l\\'Allier, un projet de mine de lithium révèle le fossé entre deux visions de l\\'écologie'],\n", " ['Oscars\\xa02024\\xa0: revivez la cérémonie avec le triomphe d’«\\xa0Oppenheimer\\xa0», et retrouvez le décryptage de notre journaliste'],\n", " ['Débat sur l’Ukraine au Parlement\\xa0: LFI annonce voter contre la stratégie française d’aide à Kiev, le RN s’abstiendra'],\n", " ['Polémique sur une photo trafiquée de Kate Middleton : «La presse est en train de se retourner contre elle (…), cela pourrait devenir dangereux», estime le spécialiste royauté Bertrand Deckers'],\n", " ['Le tueur en série \"le Grêlé\" avait participé à l\\'émission \"Tout le monde veut prendre sa place\" en 2019'],\n", " [\"Nom identique, amis en commun... 
assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " [\"Cyberattaques contre l'État : qui est Anonymous Sudan qui revendique un piratage massif d'une «intensité inédite» ?\"],\n", " ['Kate Middleton présente ses excuses après la publication d’une photo retouchée de la princesse et de ses enfants'],\n", " ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)'],\n", " ['Devant les députés, Vincent Bolloré, président du groupe Canal+, assume sa foi et récuse tout \"projet idéologique\"'],\n", " ['Marco Mouly, «\\xa0roi de l’arnaque\\xa0» à la «\\xa0taxe carbone\\xa0», se rend au tribunal en vue de son incarcération'],\n", " ['Sciences Po Paris : \"Il n\\'y a pas eu de propos antisémites\", assure une membre du comité Sciences Po pour la Palestine'],\n", " [\"«Nous ne voulons pas assassiner la paix en Europe» : Fabien Roussel annonce qu'il votera contre le plan de soutien de la France à l'Ukraine\"],\n", " ['En direct, Oscars\\xa02024\\xa0: le prix du meilleur scénario original pour «\\xa0Anatomie d’une chute\\xa0», «\\xa0Pauvres Créatures\\xa0», meilleurs décors, costumes et maquillages'],\n", " [\"Soutien à l'Ukraine à l'Assemblée nationale : les députés de La France insoumise voteront contre lors du débat\"],\n", " ['Oscars\\xa02024\\xa0: revivez la cérémonie avec le triomphe d’«\\xa0Oppenheimer\\xa0», et retrouvez le décryptage de notre journaliste'],\n", " ['La situation des finances publiques en France est «\\xa0préoccupante\\xa0», juge la Cour des comptes'],\n", " [\"Nom identique, amis en commun... assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " ['Oscars 2024 : le triomphe d\\'\"Oppenheimer\", \"Anatomie d\\'une chute\" primé, le show de Ryan Gosling... 
Ce qu\\'il faut retenir de la 96e cérémonie'],\n", " ['Gaza\\xa0: plus d’enfants ont été tués dans la bande de Gaza «\\xa0en quatre mois\\xa0» qu’en quatre ans de guerre dans le monde entier, alerte l’ONU'],\n", " ['MMA : voici les conditions demandées par «Baki» pour une revanche face à Cédric Doumbè'],\n", " ['Fin de vie\\xa0: Emmanuel Macron endosse le projet de loi sur l’«\\xa0aide à mourir\\xa0»'],\n", " ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)'],\n", " ['Le tueur en série \"Le Grêlé\" avait participé à l\\'émission \"Tout le monde veut prendre sa place\" en 2019'],\n", " ['Xavier Dupont de Ligonnès : sa sœur Christine donne dans un livre une version \"fantasmagorique\" de l\\'affaire'],\n", " ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie'],\n", " ['Oscars\\xa02024\\xa0: revivez la cérémonie avec le triomphe d’«\\xa0Oppenheimer\\xa0», et retrouvez le décryptage de notre journaliste'],\n", " ['Elections européennes 2024\\xa0: la réhabilitation de Thierry Mariani, un proche du Kremlin devenu voix du RN'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: David Cameron se dit opposé à l’envoi de troupes occidentales en Ukraine, même pour de la formation'],\n", " ['Oscars 2024 : le triomphe d\\'\"Oppenheimer\", \"Anatomie d\\'une chute\" primé, le show de Ryan Gosling... 
Ce qu\\'il faut retenir de la 96e cérémonie'],\n", " [\"Cyberattaques contre l'État : qui est Anonymous Sudan qui revendique un piratage massif d'une «intensité inédite» ?\"],\n", " ['L\\'Arcom met en garde la chaîne CNews après une séquence de \"L\\'Heure des Pros\" faisant le lien entre immigration et punaises de lit'],\n", " ['Météo : voici les 5 départements qui seront touchés par l’épisode cévenol ce vendredi et ce week-end'],\n", " ['Mathias Vicherat, une démission de la direction de Sciences po dans un climat de tension'],\n", " ['Impôts : voici les dates limites pour remplir sa déclaration de revenus en ligne en 2024'],\n", " [\"L'amiral Philippe de Gaulle, fils du général, est mort à l'âge de 102 ans\"],\n", " [\"«Nous ne voulons pas assassiner la paix en Europe» : Fabien Roussel annonce qu'il votera contre le plan de soutien de la France à l'Ukraine\"],\n", " ['Guerre en Ukraine : les clés pour comprendre le vote du Parlement sur le soutien de la France à Kiev'],\n", " ['La situation des finances publiques en France est «\\xa0préoccupante\\xa0», juge la Cour des comptes'],\n", " ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " ['Guerre en Ukraine : qu\\'est-ce que \"l\\'ambiguïté stratégique\", cette doctrine militaire brandie par Emmanuel Macron ?'],\n", " ['Guerre en\\xa0Ukraine\\xa0: les informations à retenir de la semaine du 3\\xa0au 10\\xa0mars\\xa02024'],\n", " ['Météo : voici les 5 départements qui seront touchés par l’épisode cévenol ce vendredi et ce week-end'],\n", " ['Groupes de niveau au collège : \"Il n\\'y a plus que Gabriel Attal dans ce pays qui est pour\", dénonce Sophie Vénétitay, secrétaire générale du SNES-FSU'],\n", " ['Plusieurs services de l\\'Etat sont visés par des attaques informatiques d\\'une \"intensité inédite\", signale le gouvernement'],\n", " ['Oscars 2024\\xa0: «\\xa0Oppenheimer\\xa0» et Christopher Nolan triomphent à Hollywood, «\\xa0Anatomie d’une chute\\xa0» 
récompensé du meilleur scénario original'],\n", " ['Le marché français de la musique pénalisé par la faiblesse des abonnements en streaming'],\n", " ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)'],\n", " [\"Guerre en Ukraine : Moscou affirme avoir repoussé des incursions de combattants russes pro-Kiev, à trois jours de l'élection présidentielle\"],\n", " [\"«Nous ne voulons pas assassiner la paix en Europe» : Fabien Roussel annonce qu'il votera contre le plan de soutien de la France à l'Ukraine\"],\n", " ['MMA : voici les conditions demandées par «Baki» pour une revanche face à Cédric Doumbè'],\n", " ['Mort de Thomas à Crépol\\xa0: onze personnes interpellées'],\n", " ['Guerre en Ukraine : les Etats-Unis ne voient aucun signe que la Russie se prépare à utiliser une arme nucléaire en Ukraine, selon la Maison Blanche'],\n", " ['L’étrange genèse du livre de Jordan Bardella'],\n", " [\"Dépression Monica : la femme qui accompagnait l'homme découvert mort dans le fleuve Hérault retrouvée saine et sauve\"],\n", " ['Le tabou du viol des hommes par des femmes\\xa0: «\\xa0Je ne savais pas comment lui dire que je ne voulais pas coucher avec elle\\xa0»'],\n", " ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie'],\n", " ['Météo : voici les 5 départements qui seront touchés par l’épisode cévenol ce vendredi et ce week-end'],\n", " ['Leïla Bekhti annonce avoir donné naissance à son quatrième enfant'],\n", " ['Alerte aux crues, vagues et submersion\\xa0: un père et ses deux enfants portés disparus, trois\\xa0corps retrouvés dans le Gard'],\n", " ['Adaptation au réchauffement climatique : six choses à retenir du rapport de la Cour des comptes'],\n", " ['Oscars 2024\\xa0: «\\xa0Oppenheimer\\xa0» et Christopher Nolan triomphent à Hollywood, «\\xa0Anatomie d’une chute\\xa0» récompensé du meilleur scénario original'],\n", " ['Madeleine Chapsal, journaliste et écrivaine, est morte à l’âge de 
98\\xa0ans'],\n", " [\"«Nous ne voulons pas assassiner la paix en Europe» : Fabien Roussel annonce qu'il votera contre le plan de soutien de la France à l'Ukraine\"],\n", " ['Pour Jean Viard, \"le ramadan est de plus en plus pratiqué dans une culture en expansion, mais à laquelle on n\\'a pas donné pour l\\'instant sa place symbolique\"'],\n", " ['Le tabou du viol des hommes par des femmes\\xa0: «\\xa0Je ne savais pas comment lui dire que je ne voulais pas coucher avec elle\\xa0»'],\n", " ['«\\xa0Aujourd’hui, les Etats-Unis ne dirigent pas “depuis l’arrière\\xa0: ils ne dirigent pas, tout court\\xa0»'],\n", " ['CAF : voici ce qui est obligatoire pour se connecter sur son espace personnel depuis vendredi dernier'],\n", " ['Mort de Thomas à Crépol\\xa0: onze personnes interpellées'],\n", " ['Tempête Monica : une dizaine de départements français en vigilance orange'],\n", " ['Météo : voici les 5 départements qui seront touchés par l’épisode cévenol ce vendredi et ce week-end'],\n", " ['Soupçonné de violences conjugales, Mathias Vicherat annonce sa démission de son poste de directeur de Sciences Po Paris'],\n", " ['\"Nous ne pouvons plus fermer les yeux sur ce qu\\'il se passe\" : passe d\\'armes entre la députée Sandrine Rousseau et la ministre Sarah El Haïry sur l\\'Aide sociale à l\\'enfance'],\n", " ['Ce que l\\'on sait de la cyberattaque qui a touché France Travail et concerne \"potentiellement\" 43 millions de personnes'],\n", " ['Alerte aux crues, vagues-submersion\\xa0: deux enfants portés disparus, quatre\\xa0corps retrouvés dans le Gard'],\n", " ['La philosophe Judith Butler ravive la polémique à gauche sur les attaques du Hamas en Israël, qu’elle qualifie d’«\\xa0acte de résistance\\xa0»\\xa0?'],\n", " ['Marseille\\xa0: le chef du gang Yoda, l’un des deux principaux clans du narcobanditisme de la ville, arrêté au Maroc'],\n", " [\"Portugal, Pays-Bas, Italie... 
Visualisez la progression des partis d'extrême droite en Europe lors des législatives depuis 2010\"],\n", " ['CAF : voici ce qui est obligatoire pour se connecter sur son espace personnel depuis vendredi dernier'],\n", " [\"Inondations dans le Sud\\xa0: le corps d'une fillette de 4\\xa0ans découvert dans le Gard, son frère toujours porté disparu\"],\n", " ['Vents violents : voici les 10 départements placés en vigilance jaune ce vendredi'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: le chef de la diplomatie d’Ukraine dit que «\\xa0la stratégie consistant à fournir de l’aide au compte-gouttes ne fonctionne plus\\xa0»'],\n", " ['Emmanuel Macron dénonce le «machisme» transphobe visant son épouse'],\n", " [\"Intempéries : ce que l'on sait sur les sept disparus dans le Gard et en Ardèche\"],\n", " ['Gaza\\xa0: plus d’enfants ont été tués dans la bande de Gaza «\\xa0en seulement quatre mois\\xa0» qu’en quatre ans de guerre dans le monde entier, alerte l’ONU'],\n", " ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie'],\n", " [\"Fin de vie\\xa0: à quoi ressemble le parcours d'aide à mourir voulu par Emmanuel Macron, étape par étape\\xa0?\"],\n", " ['Leïla Bekhti annonce avoir donné naissance à son quatrième enfant'],\n", " ['Six nations 2024 : Irlande, Angleterre, Écosse et France... 
Quels scénarios de sacre pour les quatre prétendants à la victoire finale'],\n", " ['Madeleine Chapsal, journaliste et écrivaine, est morte à l’âge de 98\\xa0ans'],\n", " ['CAF : voici ce qui est obligatoire pour se connecter sur son espace personnel depuis vendredi dernier'],\n", " ['Intempéries : deux nouveaux corps sans vie retrouvés à Goudargues dans le Gard, portant le bilan provisoire à trois morts'],\n", " ['\"Nous ne pouvons plus fermer les yeux sur ce qu\\'il se passe\" : passe d\\'armes entre la députée Sandrine Rousseau et la ministre Sarah El Haïry sur l\\'Aide sociale à l\\'enfance'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: la Russie dit avoir fait face à une «\\xa0attaque massive\\xa0» de drones ukrainiens'],\n", " ['Marseille\\xa0: le chef du gang Yoda, l’un des deux principaux clans du narcobanditisme de la ville, arrêté au Maroc'],\n", " ['Emmanuel Macron dénonce le «machisme» transphobe visant son épouse'],\n", " ['Devant les députés, Vincent Bolloré, président du groupe Canal+, assume sa foi et récuse tout \"projet idéologique\"'],\n", " ['Oscars 2024 : le triomphe d\\'\"Oppenheimer\", \"Anatomie d\\'une chute\" primé, le show de Ryan Gosling... 
Ce qu\\'il faut retenir de la 96e cérémonie'],\n", " ['Elections européennes 2024\\xa0: la spectaculaire rétraction du macronisme'],\n", " ['Vents violents : voici les 10 départements placés en vigilance jaune ce vendredi'],\n", " ['Gaza\\xa0: un navire d’aide humanitaire avec 200\\xa0tonnes d’eau et de nourriture a quitté Chypre'],\n", " ['L\\'Arcom met en garde la chaîne CNews après une séquence de \"L\\'Heure des Pros\" faisant le lien entre immigration et punaises de lit'],\n", " ['Leïla Bekhti annonce avoir donné naissance à son quatrième enfant'],\n", " ['CAF : voici ce qui est obligatoire pour se connecter sur son espace personnel depuis vendredi dernier'],\n", " ['En Côte d’Ivoire, la lagune Ebrié asphyxiée par la pollution'],\n", " ['\"Nous ne pouvons plus fermer les yeux sur ce qu\\'il se passe\" : passe d\\'armes entre la députée Sandrine Rousseau et la ministre Sarah El Haïry sur l\\'Aide sociale à l\\'enfance'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: David Cameron se dit opposé à l’envoi de troupes occidentales en Ukraine, même pour de la formation'],\n", " ['Pour Jean Viard, \"le ramadan est de plus en plus pratiqué dans une culture en expansion, mais à laquelle on n\\'a pas donné pour l\\'instant sa place symbolique\"'],\n", " ['«\\xa0Aujourd’hui, les Etats-Unis ne dirigent pas “depuis l’arrière”\\xa0: ils ne dirigent pas, tout court\\xa0»'],\n", " ['Plusieurs services de l\\'Etat sont visés par des attaques informatiques d\\'une \"intensité inédite\", signale le gouvernement'],\n", " ['Météo : voici les 5 départements qui seront touchés par l’épisode cévenol ce vendredi et ce week-end'],\n", " ['En direct, guerre en Ukraine\\xa0: Kiev réaffirme qu’elle ne hissera «\\xa0jamais\\xa0» le drapeau blanc après l’appel du pape à négocier'],\n", " ['\"Deepfakes\" pornographiques : pourquoi la lutte contre les images générées par intelligence artificielle s\\'annonce difficile'],\n", " ['Emmanuel Macron dénonce le «machisme» transphobe visant son 
épouse'],\n", " ['Santé : ce qui pourrait changer sur le remboursement des transports médicaux'],\n", " ['Guerre Israël-Hamas\\xa0: des dizaines de morts à Gaza après des raids israéliens menés à la veille du ramadan'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: David Cameron se dit opposé à l’envoi de troupes occidentales en Ukraine, même pour de la formation'],\n", " ['Méga-camions : les routes françaises ne sont \"pas du tout adaptées\", prévient l\\'eurodéputée écologiste Karima Delli'],\n", " ['Elections européennes 2024\\xa0: la spectaculaire rétraction du macronisme'],\n", " ['\"Je vais te péter toutes tes dents\" : on vous explique pourquoi Mathieu Kassovitz a la haine envers Saïd Taghmaoui'],\n", " ['Punaises de\\xa0lit et\\xa0immigration\\xa0: l’Arcom met en\\xa0garde la\\xa0chaîne CNews'],\n", " ['Météo : voici les 5 départements qui seront touchés par l’épisode cévenol ce vendredi et ce week-end'],\n", " ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " [\"Portugal, Pays-Bas, Italie... 
Visualisez la progression des partis d'extrême droite en Europe lors des législatives depuis 2010\"],\n", " ['En direct, guerre en Ukraine\\xa0: l’appel du pape à avoir «\\xa0le courage de hisser le drapeau blanc et de négocier\\xa0» fait réagir'],\n", " ['Mathias Vicherat, une démission de la direction de Sciences po dans un climat de tension'],\n", " ['«\\xa0Les Allemands en sont venus à oublier que la dénazification fut une entreprise incomplète et aux effets ambigus\\xa0»'],\n", " [\"Un bénéficiaire du RSA conditionné sur deux retrouve-t-il un emploi comme l'affirme Gabriel Attal ?\"],\n", " ['Emmanuel Macron dénonce le «machisme» transphobe visant son épouse'],\n", " [\"Fin de vie\\xa0: à quoi ressemble le parcours d'aide à mourir voulu par Emmanuel Macron, étape par étape\\xa0?\"],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: le point sur la situation'],\n", " ['Devant les députés, Vincent Bolloré, président du groupe Canal+, assume sa foi et récuse tout \"projet idéologique\"'],\n", " [\"Affaire Gérard Miller : une alerte sur son comportement avait été lancée auprès de la direction de l'université Paris 8\"],\n", " ['\"Je vais te péter toutes tes dents\" : on vous explique pourquoi Mathieu Kassovitz a la haine envers Saïd Taghmaoui'],\n", " ['«\\xa0Nous appelons Airbus, Safran, Air France, Aéroports de Paris, à envisager publiquement une réduction du trafic\\xa0aérien\\xa0»'],\n", " ['«\\xa0Le choix d’Aya Nakamura pour la cérémonie d’ouverture des Jeux olympiques de Paris 2024\\xa0soulève des enjeux politiques qui la dépassent\\xa0»'],\n", " ['Cyclisme : 130 coureurs abandonnent après l’annonce d’un contrôle antidopage à l’arrivée'],\n", " ['Kate Middleton : retouchée, la photo de la princesse après son opération retirée par des agences de presse'],\n", " ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " [\"Uber, Deliveroo, Bolt… Ce que va changer la directive sur les travailleurs des plateformes 
numériques adoptée par le Conseil de l'UE\"],\n", " ['Météo : voici les 5 départements qui seront touchés par l’épisode cévenol ce vendredi et ce week-end'],\n", " ['Isabelle Saporta, patronne des éditions Fayard, va être licenciée'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: seize drones abattus dans la\\xa0nuit en\\xa0Russie, selon le\\xa0ministère de\\xa0la\\xa0défense'],\n", " ['Sauter le repas du midi : voici les effets sur la santé'],\n", " [\"Un bénéficiaire du RSA conditionné sur deux retrouve-t-il un emploi comme l'affirme Gabriel Attal ?\"],\n", " ['C8\\xa0et CNews\\xa0: plus de 40 rappels à l’ordre de l’Arcom, dont la moitié depuis trois ans'],\n", " ['L\\'Arcom met en garde la chaîne CNews après une séquence de \"L\\'Heure des Pros\" faisant le lien entre immigration et punaises de lit'],\n", " ['Cyclisme : 130 coureurs abandonnent après l’annonce d’un contrôle antidopage à l’arrivée'],\n", " [\"Narcotrafic à Marseille : Félix Bingui, le chef du clan Yoda, l'un des plus puissants gangs de la ville, arrêté au Maroc\"],\n", " ['Santé : ce qui pourrait changer sur le remboursement des transports médicaux'],\n", " ['\"Dans la peau de Blanche Houellebecq\" : Guillaume Nicloux retrouve pour la troisième\\xa0fois le romancier dans un anti-biopic savoureux'],\n", " ['Punaises de\\xa0lit et\\xa0immigration\\xa0: l’Arcom met en\\xa0garde la\\xa0chaîne CNews'],\n", " [\"Cyberattaques contre l'État : qui est Anonymous Sudan qui revendique un piratage massif d'une «intensité inédite» ?\"],\n", " ['Six nations 2024 : joueurs et dominateurs, les Bleus écrasent le pays de Galles... 
Revivez le festival offensif du XV de France à Cardiff'],\n", " ['Tempête Monica : une dizaine de départements français en vigilance orange'],\n", " ['Isabelle Saporta, patronne des éditions Fayard, va être licenciée'],\n", " ['«\\xa0Le choix d’Aya Nakamura pour la cérémonie d’ouverture des Jeux olympiques de Paris 2024\\xa0soulève des enjeux politiques qui la dépassent\\xa0»'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: le président ukrainien Zelensky est arrivé en Turquie, où seront évoqués des «\\xa0projets communs dans le domaine de l’industrie de défense\\xa0»'],\n", " ['Sauter le repas du midi : voici les effets sur la santé'],\n", " ['Présidentielle en Russie : Vladimir Poutine demande aux électeurs de se rendre aux urnes pour \"décider de l\\'avenir de la patrie\"'],\n", " ['Cyclisme : 130 coureurs abandonnent après l’annonce d’un contrôle antidopage à l’arrivée'],\n", " ['Elections européennes 2024 : le Rassemblement national distance toujours largement la majorité dans les intentions de vote, selon un sondage'],\n", " ['Cinq légionnaires devant la justice pour avoir organisé la prostitution de jeunes femmes dans vingt-trois\\xa0villes françaises'],\n", " ['Météo : voici les 5 départements qui seront touchés par l’épisode cévenol ce vendredi et ce week-end'],\n", " ['Les mesures d’adaptation des villes au dérèglement climatique sont «\\xa0insuffisantes\\xa0», déplore la Cour des comptes'],\n", " [\"Uber, Deliveroo, Bolt… Ce que va changer la directive sur les travailleurs des plateformes numériques adoptée par le Conseil de l'UE\"],\n", " ['Santé : ce qui pourrait changer sur le remboursement des transports médicaux'],\n", " ['Le créateur du célèbre manga \"Dragon Ball\", Akira Toriyama, est mort à l\\'âge de 68 ans'],\n", " ['Guerre Israël-Hamas, jour 157\\xa0: premier jour de ramadan dans la bande de Gaza, sous les bombardements israéliens'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: le chef de la diplomatie d’Ukraine dit que «\\xa0la stratégie 
consistant à fournir de l’aide au compte-gouttes ne fonctionne plus\\xa0»'],\n", " ['Xavier Dupont de Ligonnès : sa sœur Christine donne dans un livre une version \"fantasmagorique\" de l\\'affaire'],\n", " ['Isabelle Saporta, patronne des éditions Fayard, va être licenciée'],\n", " ['Cyclisme : 130 coureurs abandonnent après l’annonce d’un contrôle antidopage à l’arrivée'],\n", " ['Menaces de grève sur les JO 2024 : \"Si on avait écouté les syndicats\" avant, \"on n\\'en serait pas venu à cette tragicomédie\", fustige Bernard Thibault'],\n", " ['Intelligence artificielle\\xa0: un plan d’action pour placer la France «\\xa0à la pointe\\xa0»'],\n", " ['Oscars 2024 : le triomphe d\\'\"Oppenheimer\", \"Anatomie d\\'une chute\" primé, le show de Ryan Gosling... Ce qu\\'il faut retenir de la 96e cérémonie'],\n", " ['Vents violents : voici les 10 départements placés en vigilance jaune ce vendredi'],\n", " ['Alerte aux crues, vagues et submersion\\xa0: plusieurs personnes portées disparues, trois\\xa0corps retrouvés dans le Gard'],\n", " ['\"Franchement, qui aime faire le ménage\\xa0?\"\\xa0: les\\xa0femmes appelées à une grève du travail domestique pour le 8-Mars'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: après l’attaque russe sur Odessa, la présidente de Géorgie exhorte Washington à débloquer son programme d’aide militaire à Kiev'],\n", " [\"Russie : les attaques en provenance d'Ukraine se multiplient\"],\n", " ['Leïla Bekhti annonce avoir donné naissance à son quatrième enfant'],\n", " [\"Polémique sur l'affiche des JO 2024 : «L'idéal de cet artiste, c'est mon cauchemar», affirme Gilles-William Goldnadel\"],\n", " ['Paris : un chauffeur de bus RATP en garde à vue après avoir écrasé le bras d’un sans domicile fixe'],\n", " ['«\\xa0Nous appelons Airbus, Safran, Air France, Aéroports de Paris, à envisager publiquement une réduction du trafic\\xa0aérien\\xa0»'],\n", " ['«\\xa0L’Allemagne et la France tirent deux conclusions totalement différentes de la guerre en 
Ukraine\\xa0»'],\n", " ['La journaliste et romancière Madeleine Chapsal, autrice de \"L\\'Homme de ma vie\", est morte à 98\\xa0ans'],\n", " ['Cinq légionnaires devant la justice pour avoir organisé la prostitution de jeunes femmes dans vingt-trois\\xa0villes françaises'],\n", " ['Santé : le ministre Frédéric Valletoux souhaite ouvrir une discussion sur la liste des affections longue durée, qui \"date des années 1980\"'],\n", " ['Guerre en Ukraine\\xa0: entre Jordan Bardella et Marine Le Pen, une différence de forme'],\n", " ['Vents violents : voici les 10 départements placés en vigilance jaune ce vendredi'],\n", " ['Leïla Bekhti annonce avoir donné naissance à son quatrième enfant'],\n", " [\"Kate Middleton : soupçonnée d'être retouchée, la photo de la princesse après son opération retirée par des agences de presse\"],\n", " [\"Les guerres à Gaza et en Ukraine, Donald Trump, son âge... Ce qu'il faut retenir du discours sur l'état de l'Union prononcé par Joe Biden\"],\n", " ['Alerte aux crues, vagues et submersion\\xa0: plusieurs personnes portées disparues, trois\\xa0corps retrouvés dans le Gard'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: «\\xa0Le conflit en Ukraine pourrait dégénérer en une guerre à grande échelle en Europe\\xa0», prévient la Russie'],\n", " ['Un plan d’action pour placer la France «\\xa0à la pointe\\xa0» de l’intelligence artificielle'],\n", " [\"Polémique sur l'affiche des JO 2024 : «L'idéal de cet artiste, c'est mon cauchemar», affirme Gilles-William Goldnadel\"],\n", " ['En Italie, l’opposition à Giorgia Meloni rate le doublé aux élections régionales'],\n", " ['Irlande : victoire du \"non\" au référendum sur la place des femmes et la famille'],\n", " ['Leïla Bekhti annonce avoir donné naissance à son quatrième enfant'],\n", " [\"Guerre entre Israël et le Hamas : la dessinatrice Coco menacée de mort après la publication d'un dessin sur Gaza\"],\n", " ['France Télévisions\\xa0: l’ancien président du CSA reconnaît publiquement avoir reçu 
une suggestion de François Hollande'],\n", " [\"Guerre en Ukraine : ce que l'on sait des attaques de volontaires russes pro-Kiev sur le territoire de la Russie\"],\n", " ['\"Deepfakes\" pornographiques : pourquoi la lutte contre les images générées par intelligence artificielle s\\'annonce difficile'],\n", " ['Pluie, vent, crues... neuf départements de la moitié sud désormais concernés par une vigilance orange'],\n", " ['Oscars 2024\\xa0: «\\xa0Oppenheimer\\xa0» et Christopher Nolan triomphent à Hollywood, «\\xa0Anatomie d’une chute\\xa0» récompensé du meilleur scénario original'],\n", " ['Ces grands-parents sous surveillance\\xa0: «\\xa0Nos enfants trouvent que la maison n’est pas assez sécurisée »'],\n", " [\"«La victoire de l'Ukraine est probablement impossible (...), on est dans une situation internationale extrêmement grave», s'inquiète Jean-Pierre Raffarin\"],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: le Kremlin estime qu’Emmanuel Macron augmente «\\xa0l’implication directe de la France\\xa0»'],\n", " [\"Polémique sur l'affiche des JO 2024 : «L'idéal de cet artiste, c'est mon cauchemar», affirme Gilles-William Goldnadel\"],\n", " ['Pour Jean Viard, \"le ramadan est de plus en plus pratiqué dans une culture en expansion, mais à laquelle on n\\'a pas donné pour l\\'instant sa place symbolique\"'],\n", " ['Santé : le ministre Frédéric Valletoux souhaite ouvrir une discussion sur la liste des affections longue durée, qui \"date des années 1980\"'],\n", " ['\"Mégacamions\"\\xa0: le Parlement européen adopte le texte autorisant leur circulation'],\n", " ['Cyclisme : 130 coureurs abandonnent après l’annonce d’un contrôle antidopage à l’arrivée'],\n", " ['Le médiatique prêtre Luc de Bellescize soupçonné d’agression sexuelle'],\n", " ['France Télévisions\\xa0: l’ancien président du CSA reconnaît publiquement avoir reçu une suggestion de François Hollande'],\n", " ['Leïla Bekhti annonce avoir donné naissance à son quatrième enfant'],\n", " [\"Six nations 2024 : 
l'ascension supersonique de Nicolas Depoortere, titulaire chez les Bleus un an après ses débuts en pro\"],\n", " ['Mathias Vicherat, directeur de Sciences Po Paris, mis en\\xa0cause pour violences conjugales, annonce sa\\xa0démission'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: le Royaume-Uni va livrer plus de 10 000 drones à l’armée ukrainienne cette année'],\n", " ['\"On verra qui est pour Zelensky et qui est pour Poutine\" : pourquoi Emmanuel Macron veut contraindre les partis à se positionner sur la guerre en Ukraine'],\n", " ['Guerre en Ukraine : Vladimir Poutine accuse Kiev d\\'attaquer les régions russes pour \"tenter d\\'empêcher\" la présidentielle'],\n", " [\"«La victoire de l'Ukraine est probablement impossible (...), on est dans une situation internationale extrêmement grave», s'inquiète Jean-Pierre Raffarin\"],\n", " [\"Polémique sur l'affiche des JO 2024 : «L'idéal de cet artiste, c'est mon cauchemar», affirme Gilles-William Goldnadel\"],\n", " ['Narcotrafic à Marseille : l\\'arrestation de Félix Bingui, \"un coup dur\" pour le clan Yoda'],\n", " ['Paris : un chauffeur de bus RATP en garde à vue après avoir écrasé le bras d’un sans domicile fixe'],\n", " ['Élections européennes : les intentions de vote pour le RN sont \"en progression très nette\", souligne le secrétaire général de la Fondation Jean-Jaurès'],\n", " ['Guerre en Ukraine\\xa0: entre Jordan Bardella et Marine Le Pen, une différence de forme'],\n", " [\"Intempéries dans le Gard et en Ardèche : ce que l'on sait après la découverte de trois corps\"],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: «\\xa0Le\\xa0cadre de\\xa0la\\xa0cobelligérance ne\\xa0doit pas être atteint\\xa0», affirme le\\xa0ministre des affaires étrangères, Stéphane Séjourné'],\n", " ['Irlande : victoire du \"non\" au référendum sur la place des femmes et la famille'],\n", " ['Le médiatique prêtre Luc de Bellescize soupçonné d’agression sexuelle'],\n", " ['Le tabou du viol des hommes par des femmes\\xa0: «\\xa0Je ne 
savais pas comment lui dire que je ne voulais pas coucher avec elle\\xa0»'],\n", " ['France Télévisions\\xa0: l’ancien président du CSA reconnaît publiquement avoir reçu une suggestion de François Hollande'],\n", " ['\"On verra qui est pour Zelensky et qui est pour Poutine\" : pourquoi Emmanuel Macron veut contraindre les partis à se positionner sur la guerre en Ukraine'],\n", " [\"Polar\\xa0: le meilleur roman policier de ces 20 dernières années est suédois, selon l'avis de plus de 14.000 lecteurs\"],\n", " ['Leïla Bekhti annonce avoir donné naissance à son quatrième enfant'],\n", " ['Vents violents : voici les 10 départements placés en vigilance jaune ce vendredi'],\n", " ['Guerre en Ukraine : s\\'il est élu, Donald Trump \"ne donnera pas un centime\" pour le conflit, affirme Viktor Orban'],\n", " ['Kate Middleton présente ses excuses après la publication d’une photo retouchée de la princesse et de ses enfants'],\n", " ['Cyclisme : 130 coureurs abandonnent après l’annonce d’un contrôle antidopage à l’arrivée'],\n", " ['\"Escalade guerrière\", \"encore plus inquiet\"... 
: les réactions des politiques après la réunion sur l\\'Ukraine à l\\'Élysée'],\n", " [\"Stratégie d'aide à l'Ukraine\\xa0: visualisez comment ont voté les députés\"],\n", " ['Paris : un chauffeur de bus RATP en garde à vue après avoir écrasé le bras d’un sans domicile fixe'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: une salve de drones russes a fait sept blessés à Soumy et endommagé des bâtiments civils'],\n", " [\"Polar\\xa0: le meilleur roman policier de ces 20 dernières années est suédois, selon l'avis de plus de 14.000 lecteurs\"],\n", " [\"Guerre en Ukraine : ce qu'il faut retenir de la journée du jeudi 7 mars\"],\n", " ['«\\xa0Anatomie d’une chute\\xa0»\\xa0: Emmanuel Macron félicite Justine Triet pour son Oscar'],\n", " ['Vingt-sept demi-frères et sœurs et un grand mystère\\xa0: «\\xa0Notre père biologique, c’est peut-être un serial\\xa0donneur\\xa0»'],\n", " ['France Travail annonce avoir été la cible d\\'une cyberattaque, 43 millions de personnes \"potentiellement\" concernées'],\n", " ['«\\xa0L’Allemagne et la France tirent deux conclusions totalement différentes de la guerre en Ukraine\\xa0»'],\n", " ['Vents violents : voici les 10 départements placés en vigilance jaune ce vendredi'],\n", " [\"Intempéries dans le Gard et en Ardèche : ce que l'on sait après la découverte de trois corps alors que les recherches ont été suspendues ce dimanche soir\"],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: la Russie attaque la ville d’Odessa pendant une visite de Volodymyr Zelensky et du premier ministre grec'],\n", " ['Mobilisation propalestinienne à Sciences Po\\xa0: le gouvernement va saisir la justice'],\n", " ['\"Escalade guerrière\", \"encore plus inquiet\"... 
Les réactions très critiques des politiques après la réunion sur l\\'Ukraine à l\\'Élysée'],\n", " [\"Nouvelle-Zélande : douze passagers d'un Boeing de la compagnie Latam blessés et hospitalisés après un trou d'air\"],\n", " ['Cyclisme : 130 coureurs abandonnent après l’annonce d’un contrôle antidopage à l’arrivée'],\n", " [\"Polar\\xa0: le meilleur roman policier de ces 20 dernières années est suédois, selon l'avis de plus de 14.000 lecteurs\"],\n", " ['Guerre Israël-Hamas, jour\\xa0155\\xa0: Biden juge que Nétanyahou «\\xa0fait plus de mal que de bien à Israël\\xa0» par sa conduite de la guerre à Gaza'],\n", " [\"Inscription de l'IVG dans la Constitution\\xa0: suivez notre édition spéciale pour la cérémonie de scellement\"],\n", " ['Super Tuesday\\xa0: ce qu’il faut retenir de la nuit électorale des primaires républicaines et\\xa0démocrates aux Etats-Unis'],\n", " [\"Soupçons de violences conjugales\\xa0: Stéphane Plaza sera jugé devant le tribunal correctionnel à la fin de l'été\\xa02024\"],\n", " ['Sauter le repas du midi : voici les effets sur la santé'],\n", " ['Ce prénom féminin ancien devrait être le plus donné en 2024'],\n", " ['En direct, guerre en\\xa0Ukraine\\xa0: la Russie attaque la ville d’Odessa pendant une visite de Zelensky et du premier ministre grec, un rappel du «\\xa0besoin urgent\\xa0» d’aide militaire, alertent les Etats-Unis'],\n", " ['Présidentielle américaine 2024 : Donald Trump remporte 12 Etats lors du \"Super Tuesday\", victoire surprise de Nikki Haley dans le Vermont'],\n", " ['Miss Monde 2024\\xa0: Krystyna Pyszkova, Miss République tchèque, succède à la Polonaise Karolina Bielawska'],\n", " ['Marseille\\xa0: le chef du gang Yoda, l’un des deux principaux clans du narcobanditisme de la ville, arrêté au Maroc'],\n", " ['Six nations 2024 : Irlande, Angleterre, Écosse et France... 
Quels scénarios de sacre pour les quatre prétendants à la victoire finale'],\n", " [\"Polar\\xa0: le meilleur roman policier de ces 20 dernières années est suédois, selon l'avis de plus de 14.000 lecteurs\"],\n", " ['Kate Middleton présente ses excuses après la publication d’une photo retouchée de la princesse et de ses enfants'],\n", " ['IVG dans la Constitution : Agnès Pannier-Runacher réagit au comportement de la chanteuse Catherine Ringer face à Emmanuel Macron'],\n", " ['Alerte aux crues, vagues-submersion\\xa0: six personnes sont mortes, dont une fillette de 4\\xa0ans, son frère est toujours porté disparu dans le Gard'],\n", " ['Emmanuel Macron dénonce le «machisme» transphobe visant son épouse'],\n", " ['Santé : ce qui pourrait changer sur le remboursement des transports médicaux'],\n", " ['Oscars 2024 : \"Oppenheimer\" sacré meilleur film, \"Anatomie d\\'une chute\" repart avec le meilleur scénario original'],\n", " [\"Vol MH370 : où en sont les recherches, dix ans après la disparition de l'avion de la Malaysia Airlines ?\"],\n", " ['Guerre Israël-Hamas\\xa0: des dizaines de morts à Gaza après des raids israéliens menés à la veille du ramadan'],\n", " ['Guerre en Ukraine\\xa0: comment Emmanuel Macron espère convaincre les Français de la gravité de la situation'],\n", " ['En direct, guerre en Ukraine\\xa0: Emmanuel Macron «\\xa0assume pleinement\\xa0» son appel à «\\xa0un sursaut stratégique\\xa0» en évoquant la possibilité d’envoyer des soldats en Ukraine'],\n", " ['Présidentielle américaine 2024\\xa0: Donald Trump rafle la victoire dans quatorze Etats lors du \"Super Tuesday\"'],\n", " ['Mobilisation propalestinienne à Sciences Po Paris\\xa0: Emmanuel Macron dénonce des propos «\\xa0intolérables\\xa0»'],\n", " ['Ce prénom féminin ancien devrait être le plus donné en 2024'],\n", " ['Voici les 5 pays où la dette publique par habitant est la plus élevée'],\n", " ['La CFDT attaque Carrefour sur sa politique sociale en France «\\xa0qui a des conséquences très 
fortes pour les travailleurs\\xa0»'],\n", " ['Météo : dix départements de la moitié sud du pays placés en vigilance orange'],\n", " ['Sauter le repas du midi : voici les effets sur la santé'],\n", " ['L\\'Arcom met en garde la chaîne CNews après une séquence de \"L\\'Heure des Pros\" faisant le lien entre immigration et punaises de lit'],\n", " ['Guerre en Ukraine\\xa0: Emmanuel Macron a dit qu’il n’y avait «\\xa0aucune limite\\xa0» au soutien français à Kiev, selon les chefs de parti présents à l’Elysée'],\n", " [\"Vote sur le soutien à l'Ukraine : comme les députés, les sénateurs approuvent à une large majorité l'accord de sécurité entre Paris et Kiev\"],\n", " ['Paris 2024 : Stanislas Guerini annonce \"des primes de 500, de 1\\t000, de 1 500 euros pour tous les agents de la fonction publique mobilisés\"'],\n", " ['Cinq légionnaires devant la justice pour avoir organisé la prostitution de jeunes femmes dans vingt-trois\\xa0villes françaises'],\n", " ['En direct, guerre en Ukraine\\xa0: la France évalue son aide militaire directe à 2,6\\xa0milliards d’euros depuis le début de l’offensive russe'],\n", " ['A Gaza, de nouveaux témoignages sur le «\\xa0massacre de la farine\\xa0»'],\n", " ['Biathlon : le programme complet de la 8e étape de la Coupe du monde à Soldier Hollow (États-Unis)'],\n", " ['Guerre en Ukraine\\xa0: comment Emmanuel Macron espère convaincre les Français de la gravité de la situation'],\n", " ['Voici les 5 pays où la dette publique par habitant est la plus élevée'],\n", " ['Présidentielle américaine 2024\\xa0: Donald Trump et Joe Biden appellent les électeurs de Nikki Haley à les rejoindre'],\n", " ['Ce prénom féminin ancien devrait être le plus donné en 2024'],\n", " [\"Intempéries : le corps d'un homme retrouvé dans le Gard, le bilan monte désormais à quatre morts\"],\n", " ['Guerre en Ukraine : \"Chacun des Français est menacé par le projet politique obscurantiste de Poutine\", affirme le général Vincent Desportes'],\n", " [\"Des policiers 
chinois en Hongrie, la tête de pont de l'influence de Pékin en Europe\"],\n", " [\"Sécurité sociale : les associations de patients souffrant d'affections longue durée s'inquiètent du projet de réforme de leur prise en charge\"],\n", " ['Emmanuel Macron dénonce le «machisme» transphobe visant son épouse'],\n", " ['En direct, guerre en Ukraine\\xa0: le bilan des attaques meurtrières russes à\\xa0Odessa passe à huit morts\\xa0; Volodymyr Zelensky appelle à accélérer les livraisons d’armes à Kiev'],\n", " ['Un analyste du renseignement militaire américain arrêté pour espionnage'],\n", " ['Guerre en Ukraine\\xa0: Emmanuel Macron a dit qu’il n’y avait «\\xa0aucune limite\\xa0» au soutien français à Kiev, selon les chefs de parti présents à l’Elysée'],\n", " ['Miss Monde 2024 : à quelle heure et sur quelle chaîne suivre la cérémonie ?'],\n", " ['Intempéries : un homme est porté disparu en Ardèche, annonce la préfecture'],\n", " ['Guerre en Ukraine\\xa0: comment Emmanuel Macron espère convaincre les Français de la gravité de la situation'],\n", " [\"Sécurité sociale : les associations de patients souffrant d'affections longue durée s'inquiètent du projet de réforme de leur prise en charge\"],\n", " [\"Accusations de violences conjugales : l'animateur Stéphane Plaza entendu par la police\"],\n", " ['Biathlon : le programme complet de la 8e étape de la Coupe du monde à Soldier Hollow (États-Unis)'],\n", " ['Ce prénom féminin ancien devrait être le plus donné en 2024'],\n", " ['\"Je vais te péter toutes tes dents\" : on vous explique pourquoi Mathieu Kassovitz a la haine envers Saïd Taghmaoui'],\n", " ['Guerre en Ukraine\\xa0: Volodymyr Zelensky appelle à accélérer les livraisons d’armes à Kiev'],\n", " [\"Paris 2024 : fermeture de l'espace aérien, jauge de spectateurs, défilé sur la Seine... 
Ce qu'il faut retenir du dispositif de sécurité pour la cérémonie d'ouverture\"],\n", " [\"Débat sur l'Ukraine à l'Assemblée : le Rassemblement national s'abstiendra, annonce Jordan Bardella\"],\n", " ['Alerte aux crues, vagues-submersion\\xa0: six personnes sont mortes, dont une fillette de 4\\xa0ans, son frère toujours porté disparu dans le Gard'],\n", " [\"Guerre en Ukraine : l'Allemagne embarrassée par une «fuite très grave» de conversations secrètes entre hauts gradés de l'armée\"],\n", " ['Six nations 2024 : rajeuni, le XV de France renoue avec le jeu et la victoire au pays de Galles'],\n", " ['En direct, Super Tuesday\\xa0: ce qu’il faut retenir de la nuit électorale des primaires républicaines et démocrates aux Etats-Unis'],\n", " ['Narcotrafic à Marseille : l\\'arrestation de Félix Bingui, \"un coup dur\" pour le clan Yoda'],\n", " ['Guerre en Ukraine\\xa0: Volodymyr Zelensky appelle à accélérer les livraisons d’armes à Kiev'],\n", " ['Des eurodéputés s’alarment du possible retrait des Emirats arabes unis de la liste des pays à haut risque de blanchiment'],\n", " ['Guerre Israël-Hamas, jour\\xa0155\\xa0: Biden juge que Nétanyahou «\\xa0fait plus de mal que de bien à Israël\\xa0» par sa conduite de la guerre à Gaza'],\n", " ['Vingt-sept demi-frères et sœurs et un grand mystère\\xa0: «\\xa0Notre père biologique, c’est peut-être un serial\\xa0donneur\\xa0»'],\n", " ['Paris 2024 : la CGT menace de déposer des préavis de grève et réclame des \"mesures immédiates\" du gouvernement'],\n", " [\"Polémique sur l'affiche des JO 2024 : «L'idéal de cet artiste, c'est mon cauchemar», affirme Gilles-William Goldnadel\"],\n", " ['Santé : ce qui pourrait changer sur le remboursement des transports médicaux'],\n", " [\"«La victoire de l'Ukraine est probablement impossible (...), on est dans une situation internationale extrêmement grave», s'inquiète Jean-Pierre Raffarin\"],\n", " [\"Nouvelle-Zélande : douze passagers d'un Boeing de la compagnie Latam blessés et hospitalisés 
après un trou d'air\"],\n", " ['Présidentielle américaine 2024 : la Cour suprême annule une décision déclarant Donald Trump inéligible dans le Colorado'],\n", " ['Elections européennes 2024\\xa0: avec 13 points d’avance, le RN continue de creuser l’écart avec le camp présidentiel'],\n", " ['Emmanuel Macron dénonce le «machisme» transphobe visant son épouse'],\n", " ['Paris 2024 : cinq questions sur le criblage d\\'un million de personnes \"concernées de près ou de loin\" par les Jeux olympiques'],\n", " ['Finances publiques : \"Nous sommes au pied du mur, nous ne pouvons plus différer l\\'effort\", estime le président de la Cour des comptes'],\n", " ['La députée La France insoumise Ersilia Soudais porte plainte pour viol contre son conjoint, placé en garde à vue'],\n", " ['Face à l\\'inflation, de plus en plus de Français rognent sur leurs dépenses d\\'hygiène : \"On a l\\'impression de descendre une pente glissante\"'],\n", " ['En direct, guerre en Ukraine\\xa0: deux morts dans des attaques de drones russes'],\n", " [\"Polémique sur l'affiche des JO 2024 : «L'idéal de cet artiste, c'est mon cauchemar», affirme Gilles-William Goldnadel\"],\n", " ['\"C\\'est travaillé, il n\\'improvise pas\" : dans les coulisses de \"l\\'hypercommunication\" de Gabriel Attal'],\n", " ['Exportations d’armes\\xa0: les Etats-Unis se renforcent, la France progresse et la Russie recule'],\n", " ['Mort d’Akira Toriyama, créateur du manga culte «\\xa0Dragon Ball\\xa0»'],\n", " ['L\\'Arcom met en garde la chaîne CNews après une séquence de \"L\\'Heure des Pros\" faisant le lien entre immigration et punaises de lit'],\n", " ['En direct, guerre en Ukraine\\xa0: Kiev affirme avoir détruit sept des huit drones lancés vers Odessa'],\n", " ['Ces influenceuses antiféministes qui réhabilitent le mythe de la «\\xa0bonne épouse\\xa0»'],\n", " [\"«La victoire de l'Ukraine est probablement impossible (...), on est dans une situation internationale extrêmement grave», s'inquiète Jean-Pierre 
Raffarin\"],\n", " ['\"Je vais te péter toutes tes dents\" : on vous explique pourquoi Mathieu Kassovitz a la haine envers Saïd Taghmaoui'],\n", " ['Ersilia Soudais, députée La France insoumise, porte plainte pour viol contre son conjoint, placé en garde à vue'],\n", " ['Gaza\\xa0: plus d’enfants ont été tués dans la bande de Gaza «\\xa0en quatre mois\\xa0» qu’en quatre ans de guerre dans le monde entier, alerte l’ONU'],\n", " ['Au procès de l\\'attentat de Strasbourg, le frère de Chérif Chekatt affirme que \"les monstres ne viennent pas du néant\"'],\n", " ['Face à l\\'inflation, de plus en plus de Français rognent sur leurs dépenses d\\'hygiène : \"On a l\\'impression de descendre une pente glissante\"'],\n", " ['Pour Jean Viard, \"le ramadan est de plus en plus pratiqué dans une culture en expansion, mais à laquelle on n\\'a pas donné pour l\\'instant sa place symbolique\"'],\n", " ['Emmanuel Macron dénonce le «machisme» transphobe visant son épouse'],\n", " [\"Polémique sur l'affiche des JO 2024 : «L'idéal de cet artiste, c'est mon cauchemar», affirme Gilles-William Goldnadel\"],\n", " ['Tabac : voici les 10 pays où le prix du paquet de cigarettes est le moins cher'],\n", " ['Avec les chasseurs alpins français dans l’Arctique norvégien\\xa0: «\\xa0Le grand froid est le terrain de jeu naturel des Russes, il faut être à niveau\\xa0»'],\n", " ['Le créateur du célèbre manga \"Dragon Ball\", Akira Toriyama, est mort à l\\'âge de 68 ans'],\n", " ['Elections européennes : \"Jordan Bardella et le Rassemblement national ne veulent pas parler d\\'Europe\", réagit Valérie Hayer'],\n", " ['«\\xa0Les biens occidentaux permettent la réorientation de l’économie russe vers la production de guerre\\xa0»'],\n", " ['En direct, guerre en Ukraine\\xa0: une journée de\\xa0deuil sera observée dimanche dans la\\xa0région d’Odessa après les attaques meurtrières de\\xa0drones russes'],\n", " ['Santé : ce qui pourrait changer sur le remboursement des transports médicaux'],\n", " ['Qui 
est Damien Cassé, le conjoint de la députée LFI Ersilia Soudais, accusé de viol par l’élue ?'],\n", " ['Ersilia Soudais, députée La France insoumise, porte plainte pour viol contre son conjoint, placé en garde à vue'],\n", " ['Anaïs Leleux, ex-compagne de Julien Bayou, porte plainte contre l\\'ancien leader d\\'EELV pour \"harcèlement moral\" et \"abus frauduleux de l’état de faiblesse\"'],\n", " ['Fin de vie\\xa0: Emmanuel Macron annonce pour avril un projet de loi prévoyant une «\\xa0aide à mourir\\xa0» sous «\\xa0conditions strictes\\xa0»'],\n", " ['Ces influenceuses antiféministes qui réhabilitent le mythe de la «\\xa0bonne épouse\\xa0»'],\n", " ['Guerre en Ukraine : \"Personne ne souhaite que les soldats français viennent verser leur sang sur notre terre\", confient des habitants de Kiev'],\n", " [\"Polémique sur l'affiche des JO 2024 : «L'idéal de cet artiste, c'est mon cauchemar», affirme Gilles-William Goldnadel\"],\n", " ['En direct, guerre en Ukraine\\xa0: le bilan des attaques meurtrières russes à\\xa0Odessa passe à huit morts\\xa0; Volodymyr Zelensky appelle à accélérer les livraisons d’armes à Kiev'],\n", " ['Elections européennes 2024\\xa0: avec 13 points d’avance, le RN continue de creuser l’écart avec le camp présidentiel'],\n", " ['\"C\\'est travaillé, il n\\'improvise pas\"\\xa0: dans les coulisses de \"l\\'hypercommunication\" de Gabriel Attal'],\n", " ['Débat sur l’Ukraine au Parlement\\xa0: LFI annonce voter contre la stratégie française d’aide à Kiev, le RN s’abstiendra'],\n", " ['Elections européennes : quel est le bilan du Rassemblement national au Parlement européen ?'],\n", " ['Cyclisme : 130 coureurs abandonnent après l’annonce d’un contrôle antidopage à l’arrivée'],\n", " ['En direct, guerre en Ukraine\\xa0: les Etats-Unis dénoncent la rhétorique irresponsable de Poutine, qui a parlé de «\\xa0menace réelle\\xa0» de guerre nucléaire en cas d’escalade de la guerre'],\n", " ['L\\'aide à mourir est \"un projet de loi euthanasie\", dénonce le 
Dr Claire Fourcade'],\n", " ['\"Les garçons prennent trop d\\'espace\" : comment les écoles réaménagent les cours de récréation pour réduire les inégalités de genre'],\n", " ['Elections européennes : quel est le bilan du Rassemblement national au Parlement européen ?'],\n", " ['Avec les chasseurs alpins français dans l’Arctique norvégien\\xa0: «\\xa0Le grand froid est le terrain de jeu naturel des Russes, il faut être à niveau\\xa0»'],\n", " ['En Haïti, les images d’une situation catastrophique et la menace d’une «\\xa0guerre civile\\xa0» par un chef de gang'],\n", " ['IVG dans la Constitution\\xa0: revivez les débats et l’adoption du texte par le Parlement à Versailles'],\n", " ['Présidentielle américaine 2024\\xa0: Donald Trump et Joe Biden confortés, deux résultats surprises… Ce qu\\'il faut retenir du \"Super Tuesday\"'],\n", " ['Qui est Damien Cassé, le conjoint de la députée LFI Ersilia Soudais, accusé de viol par l’élue ?'],\n", " [\"Polar\\xa0: le meilleur roman policier de ces 20 dernières années est suédois, selon l'avis de plus de 14.000 lecteurs\"],\n", " ['En Chine, le gouvernement toujours plus soumis au Parti communiste'],\n", " [\"Cinq choses à savoir sur les grandes marées d'équinoxe, un phénomène exceptionnel attendu dans les prochains jours\"],\n", " ['Guerre en Ukraine : Olaf Scholz annonce que l\\'Allemagne enquête sur une \"très grave\" fuite au sein de l\\'armée'],\n", " ['Santé : ce qui pourrait changer sur le remboursement des transports médicaux'],\n", " [\"L'amiral Philippe de Gaulle, fils du général, est mort à l'âge de 102 ans\"],\n", " ['Johanna Silva, le récit entre intime et politique d’une ancienne proche de François Ruffin'],\n", " ['France Travail annonce avoir été la cible d\\'une cyberattaque, 43 millions de personnes \"potentiellement\" concernées'],\n", " ['Le cercueil d’Alexeï Navalny en route vers un cimetière de Moscou, des milliers de personnes sont venues en dépit d’une très forte présence policière'],\n", " ['Santé : le 
ministre Frédéric Valletoux souhaite ouvrir une discussion sur la liste des affections longue durée, qui \"date des années 1980\"'],\n", " ['Fin de vie\\xa0: Emmanuel Macron annonce pour avril un projet de loi prévoyant une «\\xa0aide à mourir\\xa0» sous «\\xa0conditions strictes\\xa0»'],\n", " ['Arsenal nucléaire : voici les 9 pays les plus puissants du monde en 2024'],\n", " ['Le vote choc des Suisses pour un treizième mois de retraite'],\n", " ['IVG\\xa0: une statue de Simone Veil vandalisée à La\\xa0Roche-sur-Yon, l’Action française revendique'],\n", " ['Météo : 2 cm de glace, 30 cm de neige attendus... Ce département est placé en alerte rouge au verglas dès ce jeudi soir'],\n", " ['En direct, funérailles d’Alexeï Navalny\\xa0: la police a procédé à 45 arrestations lors de rassemblements'],\n", " ['L’Assemblée nationale approuve en\\xa0première lecture une loi de\\xa0réhabilitation pour les personnes condamnées pour homosexualité'],\n", " ['Armée : dans quels pays les militaires français sont-ils déployés ?'],\n", " ['Elections européennes 2024 : La France insoumise dévoile sa liste, la militante propalestinienne Rima Hassan en septième position'],\n", " ['Qui est Damien Cassé, le conjoint de la députée LFI Ersilia Soudais, accusé de viol par l’élue ?'],\n", " ['Menaces de grève sur les JO 2024 : \"Si on avait écouté les syndicats\" avant, \"on n\\'en serait pas venu à cette tragicomédie\", fustige Bernard Thibault'],\n", " ['Guerre en Ukraine : Olaf Scholz annonce que l\\'Allemagne enquête sur une \"très grave\" fuite au sein de l\\'armée'],\n", " ['\"C\\'est travaillé, il n\\'improvise pas\"\\xa0: dans les coulisses de \"l\\'hypercommunication\" de Gabriel Attal'],\n", " ['Débat sur le soutien à Kiev : \"Le succès de l\\'Ukraine, c\\'est aussi dans l\\'intérêt des Français\", déclare Gabriel Attal'],\n", " ['Elections européennes 2024 : La France insoumise dévoile sa liste, la militante propalestinienne Rima Hassan en septième position'],\n", " [\"Guerre en 
Ukraine : les oppositions dénoncent une posture «irresponsable» d'Emmanuel Macron après la rencontre organisée à l'Elysée\"],\n", " ['Gabriel Attal annonce une hausse spectaculaire du nombre de contrôles sur les demandeurs d’emploi'],\n", " ['IVG : revivez la cérémonie du scellement qui a entériné l’inscription dans la Constitution de la liberté de recourir à l’avortement'],\n", " ['Johanna Silva, le récit entre intime et politique d’une ancienne proche de François Ruffin'],\n", " ['Dépression Monica : une dizaine de départements français en vigilance orange'],\n", " ['En direct, guerre en Ukraine\\xa0: Emmanuel Macron salue le «\\xa0courage\\xa0» des Russes qui ont rendu hommage à Navalny'],\n", " ['Fin des super promotions, hausse du prix du tabac, RSA sous conditions étendu… Voici ce qui change au mois de mars'],\n", " ['L’Assemblée nationale approuve en\\xa0première lecture une loi de\\xa0réhabilitation pour les personnes condamnées pour homosexualité'],\n", " [\"Invasion russe en Ukraine : qu'est-ce qu'une économie de guerre, concept défendu notamment par Emmanuel Macron ?\"],\n", " ['Un discours à la nation de Poutine très attendu, à deux semaines d’une élection présidentielle sans opposition en Russie'],\n", " [\"Guerre en Ukraine : les oppositions dénoncent une posture «irresponsable» d'Emmanuel Macron après la rencontre organisée à l'Elysée\"],\n", " [\"Mort d'Alexeï Navalny : ce que révèlent ces lettres écrites par l'opposant russe lorsqu'il était en prison\"],\n", " ['Qui est Damien Cassé, le conjoint de la députée LFI Ersilia Soudais, accusé de viol par l’élue ?'],\n", " ['\"J\\'entends pleinement tenir ma place\", lance Valérie Hayer, au meeting de lancement de la campagne des élections européennes de la majorité'],\n", " ['Guerre en Ukraine\\xa0: l’Allemagne confirme qu’une conversation secrète entre des officiers a été «\\xa0interceptée\\xa0» après des fuites sur des comptes proches du Kremlin'],\n", " [\"Facebook, Instagram, Threads, Messenger... 
Plusieurs plateformes du groupe Meta victimes d'une panne mondiale\"],\n", " ['Pour Jean Viard, \"le ramadan est de plus en plus pratiqué dans une culture en expansion, mais à laquelle on n\\'a pas donné pour l\\'instant sa place symbolique\"'],\n", " ['Débat sur l’Ukraine au Parlement\\xa0: LFI annonce voter contre la stratégie française d’aide à Kiev, le RN s’abstiendra'],\n", " [\"«Cette année, on a senti que le centre d'intérêt n'était pas forcément l'agriculture mais avant tout la politique», témoigne un éleveur\"],\n", " ['Menaces de grève sur les JO 2024 : \"Si on avait écouté les syndicats\" avant, \"on n\\'en serait pas venu à cette tragicomédie\", fustige Bernard Thibault'],\n", " [\"Le meurtrier du policier Eric Masson, Ilias Akoudad, est condamné à 30\\xa0ans de prison assortie d'une peine de 20 ans de sûreté\"],\n", " ['Real Sociedad-PSG\\xa0: grâce à un doublé de Mbappé, Paris se qualifie pour les quarts de finale de la Ligue des champions'],\n", " ['\"Je vais te péter toutes tes dents\" : on vous explique pourquoi Mathieu Kassovitz a la haine envers Saïd Taghmaoui'],\n", " ['La perchiste Margot Chevrier grièvement blessée aux Mondiaux d’athlétisme en salle'],\n", " ['Vladimir Poutine lors de son discours à nation\\xa0: les soldats russes en Ukraine «\\xa0ne reculeront pas, n’échoueront pas, ne trahiront pas\\xa0»'],\n", " ['Escroquerie à la «\\xa0taxe carbone\\xa0»\\xa0: Marco Mouly, menacé d’incarcération, annonce partir en cavale'],\n", " ['Arsenal nucléaire : voici les 9 pays les plus puissants du monde en 2024'],\n", " ['Alerte aux crues, pluie et inondation\\xa0: six personnes portées disparues dans le Gard, une autre en Ardèche'],\n", " ['Mort d’Akira Toriyama, créateur du manga culte «\\xa0Dragon Ball\\xa0»'],\n", " [\"Sécurité sociale : les associations de patients souffrant d'affections longue durée s'inquiètent du projet de réforme de leur prise en charge\"],\n", " [\"Pyrénées : les images impressionnantes d'un ours qui s'est attaqué à un 
sanglier au bord d'une route (vidéo)\"],\n", " ['En direct, guerre en Ukraine\\xa0: «\\xa0la perspective d’une solution diplomatique est encore lointaine\\xa0», selon nos journalistes'],\n", " ['Biathlon : le programme complet de la 8e étape de la Coupe du monde à Soldier Hollow (États-Unis)'],\n", " [\"Des policiers chinois en Hongrie, la tête de pont de l'influence de Pékin en Europe\"],\n", " [\"Accident de car mortel sur l'A6 : le conducteur mis en examen pour homicide involontaire\"],\n", " ['En Ukraine, la bataille de l’air se rééquilibre'],\n", " [\"Chômage : France Travail lance un simulateur pour aider les demandeurs d'emploi à évaluer le montant de leurs allocations\"],\n", " ['RSA sous conditions : Matignon dévoile la liste des 29\\xa0nouveaux départements concernés par le dispositif'],\n", " ['Guerre en Ukraine : Moscou estime qu\\'Emmanuel Macron accroît \"l\\'implication\" de la France dans le conflit'],\n", " ['\"Cela s\\'est vraiment installé dans les pratiques\"\\xa0: près d\\'un cadre sur deux démissionnerait en cas de suppression du télétravail, selon une étude'],\n", " ['En direct, Super Tuesday\\xa0: ce qu’il faut retenir de la nuit électorale des primaires républicaines et démocrates aux Etats-Unis'],\n", " ['Sécurité sociale : le gouvernement veut réduire les dépenses liées aux affections longue durée'],\n", " ['\"Chine, opérations secrètes\" : comment un ex-agent des services secrets français a trahi son pays au profit de Pékin'],\n", " ['«\\xa0Dans le monde du libre-échange, une frite industrielle qui parcourt 10\\xa0000\\xa0kilomètres a plus de valeur qu’une pomme de terre locale\\xa0»'],\n", " ['Ce prénom féminin ancien devrait être le plus donné en 2024'],\n", " ['Guerre en Ukraine : \"Chacun des Français est menacé par le projet politique obscurantiste de Poutine\", affirme le général Vincent Desportes'],\n", " ['En direct, guerre en Ukraine\\xa0: «\\xa0La Russie ne s’arrêtera pas\\xa0», avertit Volodymyr Zelensky'],\n", " 
['Manifestation du 8-Mars\\xa0: le préfet de police saisit la justice après les violences contre des femmes d’un collectif de la communauté juive'],\n", " ['Disparition du vol MH370\\xa0: que sait-on dix ans après\\xa0?'],\n", " ['En direct, guerre en Ukraine\\xa0: le point sur la situation mercredi 28\\xa0février'],\n", " [\"Guerre en Ukraine : ce que l'on sait des attaques de volontaires russes pro-Kiev sur le territoire de la Russie\"],\n", " ['A Moscou, Alexeï Navalny traité en ennemi jusque dans la tombe'],\n", " ['Sécurité sociale : le gouvernement veut réduire les dépenses liées aux affections longue durée'],\n", " ['Marseille\\xa0: le chef du gang Yoda, l’un des deux principaux clans du narcobanditisme de la ville, arrêté au Maroc'],\n", " ['Biathlon : le programme complet de la 8e étape de la Coupe du monde à Soldier Hollow (États-Unis)'],\n", " ['Fin de vie : Emmanuel Macron promet un projet de loi pour une \"aide à mourir\" sous \"conditions strictes\", qui sera présenté en avril'],\n", " ['Bruno Le\\xa0Maire\\xa0: «\\xa0L’Etat doit reprendre la main sur l’assurance-chômage de manière définitive\\xa0»'],\n", " ['Groupes de niveau au collège\\xa0: voici à quoi va ressembler le dispositif à la rentrée de septembre'],\n", " [\"Côte-d'Or : une adolescente tuée et douze personnes blessées dans un accident d'autocar sur l'autoroute A6\"],\n", " ['Guerre en Ukraine : Kiev convoque l\\'envoyé du Vatican après les propos du pape invitant le pays à \"hisser le drapeau blanc\"'],\n", " [\"Chômage : France Travail lance un simulateur pour aider les demandeurs d'emploi à évaluer le montant de leurs allocations\"],\n", " ['Nouveau contrôle technique à partir du 15 avril : qui est concerné ?'],\n", " ['Arsenal nucléaire : voici les 9 pays les plus puissants du monde en 2024'],\n", " ['Sur la piste des brouilleurs d’ondes, avec les «\\xa0chasseurs\\xa0» de l’ANFR'],\n", " ['En direct, guerre en Ukraine\\xa0: le comité Nobel dénonce la condamnation d’Oleg Orlov, une 
nouvelle tentative du «\\xa0régime\\xa0» de Poutine pour «\\xa0faire taire\\xa0» les critiques'],\n", " [\"Vol MH370 : où en sont les recherches, dix ans après la disparition de l'avion de la Malaysia Airlines ?\"],\n", " ['Face à l\\'inflation, de plus en plus de Français rognent sur leurs dépenses d\\'hygiène : \"On a l\\'impression de descendre une pente glissante\"'],\n", " ['Guerre en Ukraine : Vladimir Poutine met en garde les pays occidentaux contre une \"menace réelle\" de conflit nucléaire'],\n", " ['Hausse du prix du tabac : combien coûtait un paquet il y a 20 ans ?'],\n", " ['En Haïti, les images d’une situation catastrophique et la menace d’une «\\xa0guerre civile\\xa0» par un chef de gang'],\n", " ['A Moscou, Alexeï Navalny traité en ennemi jusque dans la tombe'],\n", " ['En direct, guerre en Ukraine\\xa0: les tensions en Transnistrie sont «\\xa0dangereuses\\xa0» pour la région, selon le premier ministre polonais'],\n", " [\"Polar\\xa0: le meilleur roman policier de ces 20 dernières années est suédois, selon l'avis de plus de 14.000 lecteurs\"],\n", " ['Bruno Le\\xa0Maire\\xa0: «\\xa0L’Etat doit reprendre la main sur l’assurance-chômage de manière définitive\\xa0»'],\n", " ['\"On n\\'est pas comme lui\" : dans le Colorado, les républicains se déchirent autour de la candidature de Donald Trump à la présidentielle américaine'],\n", " ['Galles-France\\xa0: après un début de tournoi décevant, les Bleus se reprennent en marquant cinq essais à Cardiff'],\n", " ['Quand l’extrême droite dénonce, au mépris de la réalité, l’installation d’un «\\xa0camp de migrants\\xa0» dans un château'],\n", " ['Guerre en Ukraine : les clés pour comprendre le vote du Parlement sur le soutien de la France à Kiev'],\n", " ['Ce prénom féminin ancien devrait être le plus donné en 2024'],\n", " ['Gabriel Attal annonce une hausse spectaculaire du nombre de contrôles sur les demandeurs d’emploi'],\n", " ['En Ukraine, la bataille de l’air se rééquilibre'],\n", " ['Oscars 2024 : les 
dix\\xa0candidats à la récompense suprême du meilleur film'],\n", " ['Marie-Charlotte Iratzoquy, une championne de ski tiraillée par sa conscience écologique'],\n", " ['Triplement des contrôles des chômeurs : \"Le problème principal aujourd\\'hui est que les employeurs refusent d\\'embaucher\", réplique la CGT chômeurs et précaires'],\n", " [\"Chômage : France Travail lance un simulateur pour aider les demandeurs d'emploi à évaluer le montant de leurs allocations\"],\n", " ['Guerre en Ukraine : Vladimir Poutine met en garde les pays occidentaux contre une \"menace réelle\" de conflit nucléaire'],\n", " ['Contre-espionnage\\xa0: la DGSI enquête sur une tentative de déstabilisation des élections européennes par des prorusses en France'],\n", " ['\"Tout ce qui est marqué est vrai\" : on a retrouvé des agriculteurs qui s\\'affichent sur les emballages de produits Intermarché'],\n", " ['Menaces de grève sur les JO 2024 : \"Si on avait écouté les syndicats\" avant, \"on n\\'en serait pas venu à cette tragicomédie\", fustige Bernard Thibault'],\n", " ['En direct, guerre en Ukraine\\xa0: Moscou assure que la protection des habitants de Transnistrie est une «\\xa0priorité\\xa0»'],\n", " ['\"Franchement, qui aime faire le ménage\\xa0?\"\\xa0: les\\xa0femmes appelées à une grève du travail domestique pour le 8-Mars'],\n", " ['Triplement des contrôles des chômeurs : \"Le problème principal aujourd\\'hui est que les employeurs refusent d\\'embaucher\", réplique la CGT chômeurs et précaires'],\n", " ['Sur la piste des brouilleurs d’ondes, avec les «\\xa0chasseurs\\xa0» de l’ANFR'],\n", " ['Ce que révèle le registre des cadeaux offerts aux députés\\xa0: du champagne, des matchs de football et une «\\xa0nuit au château\\xa0»'],\n", " ['A Paris, une femme porte plainte pour viol lors d’un massage au Ritz'],\n", " ['La députée LFI Ersilia Soudais porte plainte pour viol, son conjoint placé en garde à vue'],\n", " ['Guerre en Ukraine\\xa0: «\\xa0Nous ferons tout ce qu’il faut pour 
que la Russie ne puisse pas gagner cette guerre\\xa0», affirme Macron lors du sommet des alliés européens de Kiev'],\n", " [\"Polar\\xa0: le meilleur roman policier de ces 20 dernières années est suédois, selon l'avis de plus de 14.000 lecteurs\"],\n", " ['Nouveau contrôle technique à partir du 15 avril : qui est concerné ?'],\n", " ['\"Je n\\'imaginais pas me rhabiller et lui dire que je n\\'en avais plus envie\" : qu\\'est-ce que la \"dette sexuelle\", que 40% des Français déclarent avoir déjà ressentie ?'],\n", " ['\"On est des outsiders face aux mastodontes\" : comment l\\'équipe d\\'\"Anatomie d\\'une chute\" a fait campagne pour les Oscars 2024'],\n", " ['Guerre en Ukraine : des échanges entre officiers ont bien été \"interceptés\", selon le ministre de la Défense allemand'],\n", " ['Guerre en Ukraine\\xa0: le Kremlin accuse Emmanuel Macron d’accroître «\\xa0l’implication\\xa0directe de la France\\xa0» dans le conflit'],\n", " ['\"Cela s\\'est vraiment installé dans les pratiques\"\\xa0: près d\\'un cadre sur deux démissionnerait en cas de suppression du télétravail, selon une étude'],\n", " ['A Gaza, des dizaines de morts lors d’une distribution d’aide alimentaire, selon le Hamas\\xa0: Washington exige «\\xa0des réponses\\xa0» d’Israël, Emmanuel Macron exprime sa «\\xa0profonde indignation\\xa0»'],\n", " ['IVG dans la Constitution : Agnès Pannier-Runacher réagit au comportement de la chanteuse Catherine Ringer face à Emmanuel Macron'],\n", " [\"Affaire PPDA : le parquet de Nanterre demande aux juges d'instruction d'enquêter sur deux viols et une agression sexuelle\"],\n", " ['Elections européennes 2024\\xa0: la spectaculaire rétraction du macronisme'],\n", " ['Tabac : voici les 10 pays où le prix du paquet de cigarettes est le moins cher'],\n", " ['Agriculture\\xa0: les changements d’habitudes alimentaires des Français contribuent à la hausse des importations'],\n", " ['Pluie-inondation\\xa0: quatre départements du Sud-Est en vigilance orange samedi'],\n", " 
['PFL Paris : la carte complète des combats avec Cédric Doumbè-«Baki»'],\n", " ['Val-d’Oise : une joueuse de 19 ans victime d’un arrêt cardiaque en plein tournoi de basket'],\n", " ['A Gaza, des dizaines de morts lors d’une distribution d’aide alimentaire, selon le Hamas\\xa0: Washington exige «\\xa0des réponses\\xa0» d’Israël, Emmanuel Macron exprime sa «\\xa0profonde indignation\\xa0»'],\n", " ['Ce que révèle le registre des cadeaux offerts aux députés\\xa0: du champagne, des matchs de football et une «\\xa0nuit au château\\xa0»'],\n", " [\"«Cette année, on a senti que le centre d'intérêt n'était pas forcément l'agriculture mais avant tout la politique», témoigne un éleveur\"],\n", " ['Guerre en Ukraine : \"C\\'est la sécurité de l\\'Europe qui est en jeu\", affirme le général Jean-Paul Paloméros après les propos d\\'Emmanuel Macron'],\n", " ['Les Etats-Unis «\\xa0n’enverront pas de soldats combattre en Ukraine\\xa0», assure la Maison Blanche'],\n", " [\"«La victoire de l'Ukraine est probablement impossible (...), on est dans une situation internationale extrêmement grave», s'inquiète Jean-Pierre Raffarin\"],\n", " [\"Mort de Thomas à Crépol : onze personnes interpellées, soupçonnées d'avoir été présentes le soir de la mort de l'adolescent\"],\n", " [\"Bande de Gaza : les premiers largages d'aide humanitaire ont eu lieu\"],\n", " ['Affaire Arnaud Mimran : fin de l’enquête sur les trois meurtres attribués à l\\'escroc à la \"taxe carbone\"'],\n", " ['Cyberattaque contre plusieurs ministères : le parquet de Paris ouvre une enquête'],\n", " ['En direct, guerre en Ukraine\\xa0: l’envoi de troupes en Ukraine n’est «\\xa0pas d’actualité pour l’instant\\xa0», selon la Suède, et impossible pour la Slovaquie et la Pologne'],\n", " ['Violences en Haïti\\xa0: on a rencontré \"Barbecue\", ce chef de gang qui menace d\\'une guerre civile'],\n", " ['Sécurité sociale : le gouvernement veut réduire les dépenses liées aux affections longue durée'],\n", " ['Guerre en Ukraine\\xa0: 
l’Allemagne confirme qu’une conversation secrète entre des officiers a été «\\xa0interceptée\\xa0» après des fuites sur des comptes proches du Kremlin'],\n", " ['Envoi de troupes en Ukraine \"pas exclu\" par Emmanuel Macron : la question fait désormais partie des \"options\", selon un ancien colonel'],\n", " ['La mort d’Akira Toriyama, l’autodidacte du manga qui voulait juste dessiner ce qui lui plaisait'],\n", " ['Guerre en Ukraine\\xa0: le Kremlin accuse Emmanuel Macron d’accroître «\\xa0l’implication\\xa0directe de la France\\xa0» dans le conflit'],\n", " ['\"Je vais te péter toutes tes dents\" : on vous explique pourquoi Mathieu Kassovitz a la haine envers Saïd Taghmaoui'],\n", " ['A Gaza, une distribution alimentaire vire à la tragédie\\xa0: ce que l’on sait sur le convoi humanitaire et les tirs de l’armée israélienne'],\n", " ['Un conflit OTAN-Russie sera «\\xa0inévitable\\xa0» si des troupes occidentales sont envoyées en Ukraine, assure le porte-parole du Kremlin'],\n", " ['Vent de fronde à EDF contre un projet controversé de centrale hydroélectrique en Arabie saoudite'],\n", " ['Guerre en Ukraine\\xa0: Emmanuel Macron a dit qu’il n’y avait «\\xa0aucune limite\\xa0» au soutien français à Kiev, selon les chefs de parti présents à l’Elysée'],\n", " ['Depuis 2005, au moins 152 personnes ont été agressées par des chauffeurs de taxis ou des ambulanciers'],\n", " ['IVG dans la Constitution\\xa0: le vote du Sénat ouvre la voie à l’adoption en Congrès le 4 mars, revivez les débats'],\n", " ['Waze : comment une nouvelle fonctionnalité va-t-elle permettre de réduire le risque d’excès de vitesse ?'],\n", " ['Vent de fronde à EDF contre un projet controversé de centrale hydroélectrique en Arabie saoudite'],\n", " [\"«La victoire de l'Ukraine est probablement impossible (...), on est dans une situation internationale extrêmement grave», s'inquiète Jean-Pierre Raffarin\"],\n", " [\"Six nations 2024 : une Italie spectaculaire s'offre l'Ecosse et signe sa première victoire 
à domicile dans le Tournoi depuis 2013\"],\n", " ['Au lendemain des funérailles de Navalny, des centaines de\\xa0Russes continuent de\\xa0défiler devant sa\\xa0tombe'],\n", " ['Au Salon de l’agriculture, Jordan Bardella répond aux attaques d’Emmanuel Macron\\xa0: «\\xa0C’est un homme seul qui gouverne contre les Français\\xa0»'],\n", " ['Conflit en Ukraine : un soutien militaire à Kiev ne serait \"pas dans l\\'intérêt\" des Occidentaux, met en garde le Kremlin'],\n", " [\"Affaire du voyage d'Anne Hidalgo en Nouvelle-Calédonie et Polynésie : la mairie de Paris perquisitionnée dans le cadre d'une enquête ouverte par le parquet national financier\"],\n", " ['Plus de 70 armes à feu et plus de 3\\xa0000 munitions saisies chez Alain Delon'],\n", " ['Haïti : le Premier ministre Ariel Henry démissionne, en pleine vague de violences des gangs'],\n", " ['En direct, guerre en Ukraine\\xa0: la France réunit les alliés européens de\\xa0l’Ukraine lundi à\\xa0Paris'],\n", " ['Marie-Charlotte Iratzoquy, une championne de ski tiraillée par sa conscience écologique'],\n", " ['Armée : dans quels pays les militaires français sont-ils déployés ?'],\n", " ['Pour Jean Viard, \"le ramadan est de plus en plus pratiqué dans une culture en expansion, mais à laquelle on n\\'a pas donné pour l\\'instant sa place symbolique\"'],\n", " ['Colère des agriculteurs : \"Dans quel pays on vit ? 
Ça devient du grand n\\'importe quoi !\", réagit la Coordination rurale après l\\'interpellation de 66 personnes place de l\\'Etoile à Paris'],\n", " ['La Mairie de Paris perquisitionnée dans l’affaire du voyage d’Anne Hidalgo à Tahiti'],\n", " ['Élections européennes 2024 : Valérie Hayer, tête de liste Renaissance, appelle à un \"sursaut\" pour \"déjouer le scénario du pire\"'],\n", " ['Climat\\xa0: pourquoi les températures battent tous les records depuis la\\xa0mi-2023'],\n", " ['Des soldats israéliens déployés à Gaza mettent en scène leurs exactions sur les réseaux sociaux'],\n", " ['Les infections sexuellement transmissibles explosent en Europe'],\n", " ['Hausse du prix du tabac : combien coûtait un paquet il y a 20 ans ?'],\n", " ['Narcotrafic\\xa0: l’arrestation de Félix Bingui, épilogue d’une guerre des clans à Marseille'],\n", " ['Disparition du vol MH370\\xa0: que sait-on dix ans après\\xa0?'],\n", " ['\"Toutes les preuves étaient là\" : quelques jours avant les attaques du Hamas, des soldates israéliennes avaient alerté les autorités'],\n", " ['Nasa : ce que l’on sait du mystérieux phénomène capturé au-dessus du golfe du Mexique par l’agence spatiale américaine'],\n", " ['Vents violents : voici les 16 départements placés en vigilance jaune ce lundi'],\n", " ['Des soldats israéliens déployés à Gaza mettent en scène leurs exactions sur les réseaux sociaux'],\n", " [\"Guerre en Ukraine : les propos d'Emmanuel Macron sur un éventuel envoi de troupes occidentales suscitent la polémique\"],\n", " ['Emmanuel Macron appelle à un «\\xa0sursaut\\xa0» nécessaire des alliés de Kiev, en ouverture de la conférence de soutien à l’Ukraine'],\n", " [\"Guerre en Ukraine : les oppositions dénoncent une posture «irresponsable» d'Emmanuel Macron après la rencontre organisée à l'Elysée\"],\n", " ['Pour Jean Viard, \"le ramadan est de plus en plus pratiqué dans une culture en expansion, mais à laquelle on n\\'a pas donné pour l\\'instant sa place symbolique\"'],\n", " ['IVG 
dans la Constitution : le vote des parlementaires est en cours à Versailles'],\n", " ['Marion Maréchal et Eric Zemmour, en difficulté face au RN, renforcent leur ligne anti-islam lors de leur premier meeting des européennes'],\n", " ['\"On est entièrement d\\'accord\" : au Salon de l\\'agriculture, Les Ecologistes veulent faire oublier le désamour à leur égard'],\n", " ['En direct, guerre en Ukraine\\xa0: Emmanuel Macron appelle à un «\\xa0sursaut\\xa0» nécessaire des alliés de Kiev, lors de la conférence de soutien à l’Ukraine'],\n", " [\"La Suède devient officiellement le 32e membre de l'Otan\"],\n", " ['\"Mégacamions\"\\xa0: le Parlement européen adopte le texte autorisant leur circulation'],\n", " ['Real Sociedad-PSG\\xa0: revivez le doublé de Kylian Mbappé et la qualification de Paris pour les quarts de finale de la Ligue des champions'],\n", " [\"Colère des agriculteurs : comment l'incitation à réhabiliter les haies divise le monde rural\"],\n", " ['\"À un moment, j\\'ai dit stop\" : de jeunes professionnelles du cinéma racontent leurs débuts face aux violences sexuelles et au harcèlement'],\n", " ['A Moscou, Alexeï Navalny traité en ennemi jusque dans la tombe'],\n", " [\"Pyrénées : les images impressionnantes d'un ours qui s'est attaqué à un sanglier au bord d'une route (vidéo)\"],\n", " ['\"On est les dindons de la farce\" : cinq agriculteurs nous ouvrent leurs livres de comptes'],\n", " ['Au procès de l\\'attentat de Strasbourg, le frère de Chérif Chekatt affirme que \"les monstres ne viennent pas du néant\"'],\n", " ['Etats-Unis\\xa0: la Cour suprême va statuer sur l’immunité pénale de Donald Trump'],\n", " ['En direct, guerre en Ukraine\\xa0: des milliers de personnes manifestent en France et à l’étranger pour soutenir le pays'],\n", " ['Gabriel Attal dénonce le \"cirque médiatique\" de Jordan Bardella au Salon de l\\'agriculture'],\n", " ['Etats-Unis\\xa0: un Boeing 777\\xa0perd une roue peu après son décollage de San Francisco'],\n", " ['Isabelle 
Saporta, PDG de Fayard, placée dans une situation intenable'],\n", " ['Alexeï Navalny enterré à Moscou, en présence de milliers de ses partisans'],\n", " ['Guerre en Ukraine : Vladimir Poutine met en garde l\\'Occident contre la \"réelle menace\" de conflit nucléaire'],\n", " ['Les infections sexuellement transmissibles explosent en Europe'],\n", " ['Météo : voici les 5 départements qui seront touchés par l’épisode cévenol ce vendredi et ce week-end'],\n", " ['Dépression Monica : une dizaine de départements français en vigilance orange'],\n", " ['En direct, guerre en Ukraine\\xa0: des milliers de personnes manifestent en France et à l’étranger pour soutenir le pays'],\n", " ['Super Tuesday\\xa0: ce qu’il faut retenir de la nuit électorale des primaires républicaines et\\xa0démocrates aux Etats-Unis'],\n", " ['\"On est les dindons de la farce\" : cinq agriculteurs nous ouvrent leurs livres de comptes'],\n", " [\"Sécurité sociale : les associations de patients souffrant d'affections longue durée s'inquiètent du projet de réforme de leur prise en charge\"],\n", " ['\"À un moment, j\\'ai dit stop\" : de jeunes professionnelles du cinéma racontent leurs débuts face aux violences sexuelles et au harcèlement'],\n", " ['Manifestation des agriculteurs à Bordeaux\\xa0: le préfet de Gironde condamne \"avec la plus grande fermeté\" des \"actes de vandalisme\"'],\n", " ['Rachida Dati : avant la cérémonie des César, la ministre de la Culture fustige un «aveuglement qui a duré des années» dans le cinéma français'],\n", " ['Ce prénom féminin ancien devrait être le plus donné en 2024'],\n", " [\"Science Po Paris : un drapeau palestinien déployé dans l'établissement, le syndicat étudiant UNI dénonce «une hostilité à Israël»\"],\n", " ['Guerre Israël-Hamas\\xa0: des dizaines de morts à Gaza après des raids israéliens menés à la veille du ramadan'],\n", " ['Après une perquisition dans le manoir d’Alain Delon, la police trouve 72\\xa0armes à feu et plus de 3\\xa0000\\xa0munitions 
saisies'],\n", " ['\"Il y a de quoi avoir le vertige\" : après quatre mois de travail, la commission d\\'enquête du Sénat sur le trafic de drogue en France dresse un premier bilan alarmant'],\n", " [\"Guerre en Ukraine : derrière le tabou de l'envoi des troupes, la très secrète présence occidentale aux côtés de Kiev\"],\n", " ['La bonne fortune de Dominique de Villepin'],\n", " ['Sécurité sociale : le gouvernement veut réduire les dépenses liées aux affections longue durée'],\n", " ['Savoie : un touriste belge attaqué par 2 chiens sur une piste de ski aux Contamines'],\n", " ['\"On est les dindons de la farce\" : cinq agriculteurs nous ouvrent leurs livres de comptes'],\n", " ['Le rappeur Lomepal visé par une deuxième plainte pour viol'],\n", " ['Gabriel Attal annonce une hausse spectaculaire du nombre de contrôles sur les demandeurs d’emploi'],\n", " ['Après une perquisition dans le manoir d’Alain Delon, la police trouve 72\\xa0armes à feu et plus de 3\\xa0000\\xa0munitions saisies'],\n", " ['La bonne fortune de Dominique de Villepin'],\n", " ['Plainte de la députée LFI Ersilia Soudais pour viol : la garde à vue de son conjoint levée'],\n", " ['Super Tuesday\\xa0: ce qu’il faut retenir de la nuit électorale des primaires républicaines et\\xa0démocrates aux Etats-Unis'],\n", " ['Attentat déjoué en Belgique : trois adolescents interpellés en France'],\n", " ['Oscars 2024\\xa0: «\\xa0Oppenheimer\\xa0» et Christopher Nolan triomphent à Hollywood, «\\xa0Anatomie d’une chute\\xa0» récompensé du meilleur scénario'],\n", " ['Les infections sexuellement transmissibles explosent en Europe'],\n", " ['Élections européennes : les intentions de vote pour le RN sont \"en progression très nette\", souligne le secrétaire général de la Fondation Jean-Jaurès'],\n", " ['Nouveau contrôle technique à partir du 15 avril : qui est concerné ?'],\n", " ['Guerre en Ukraine\\xa0: l’Allemagne confirme qu’une conversation secrète entre des officiers a été «\\xa0interceptée\\xa0» après des 
fuites sur des comptes proches du Kremlin'],\n", " ['«\\xa0L’Europe est le continent qui se réchauffe le plus rapidement\\xa0» et doit agir beaucoup plus vite pour éviter des situations «\\xa0catastrophiques\\xa0»'],\n", " ['\"Madame, je veux en finir, je n\\'en peux plus\" : on a suivi une assistante sociale au chevet des agriculteurs en Haute-Vienne'],\n", " ['Climat : la neige a recouvert moins de 40% de la surface des Alpes au mois de février, un record'],\n", " ['Salon de l\\'agriculture : Jordan Bardella réclame un \"patriotisme économique\" et déplore \"la détresse de la France rurale\"'],\n", " ['RSA sous conditions : Matignon dévoile la liste des 29\\xa0nouveaux départements concernés par le dispositif'],\n", " ['En direct, guerre en Ukraine\\xa0: l’Elysée affirme que le soutien de la France à Kiev ne «\\xa0faiblira pas\\xa0»'],\n", " ['Sécurité sociale : le gouvernement veut réduire les dépenses liées aux affections longue durée'],\n", " ['A Gaza, des dizaines de morts lors d’une distribution d’aide alimentaire, selon le Hamas\\xa0: Washington exige «\\xa0des réponses\\xa0» d’Israël, Emmanuel Macron exprime sa «\\xa0profonde indignation\\xa0»'],\n", " ['\"Je n\\'imaginais pas me rhabiller et lui dire que je n\\'en avais plus envie\" : qu\\'est-ce que la \"dette sexuelle\", que 40% des Français déclarent avoir déjà ressentie ?'],\n", " ['L\\'\"aide à mourir\" proposée par Emmanuel Macron est \"une avancée\" mais \"n\\'est pas faisable\" en l\\'état, alerte Jean Luc Roméro'],\n", " ['Emmanuel Macron persiste sur l’envoi éventuel de militaires en Ukraine, la plupart des alliés occidentaux prennent leurs distances'],\n", " ['\"C\\'est travaillé, il n\\'improvise pas\" : dans les coulisses de \"l\\'hypercommunication\" de Gabriel Attal'],\n", " ['RSA sous conditions : Matignon dévoile la liste des 29\\xa0nouveaux départements concernés par le dispositif'],\n", " ['Miss Monde 2024\\xa0: Krystyna Pyszkova, Miss République tchèque, succède à la Polonaise Karolina 
Bielawska'],\n", " ['«\\xa0America is back\\xa0», le bilan économique reluisant de Joe\\xa0Biden'],\n", " ['Patrick Balkany souhaite «\\xa0virer ceux qui sont là\\xa0» lors des prochaines élections municipales de Levallois-Perret'],\n", " ['En direct, guerre en Ukraine\\xa0: Giorgia Meloni présidera un sommet du G7 samedi après-midi à Kiev'],\n", " [\"Guerre entre Israël et le Hamas : pourquoi le début du ramadan, dimanche, complique l'espoir d'une trêve et risque d'entraîner une nouvelle escalade\"],\n", " ['Gabriel Attal veut encore durcir les règles de l’assurance-chômage, en réduisant un peu plus la durée d’indemnisation'],\n", " ['France-Italie : un pâle XV de France concède le match nul... Revivez le match du Six Nations'],\n", " [\"«La victoire de l'Ukraine est probablement impossible (...), on est dans une situation internationale extrêmement grave», s'inquiète Jean-Pierre Raffarin\"],\n", " ['Nasa : ce que l’on sait du mystérieux phénomène capturé au-dessus du golfe du Mexique par l’agence spatiale américaine'],\n", " [\"IVG : pourquoi le Sénat pourrait retarder l'inscription du droit à l'avortement dans la Constitution\"],\n", " [\"Six nations 2024 : l'ascension supersonique de Nicolas Depoortere, titulaire chez les Bleus un an après ses débuts en pro\"],\n", " ['Retraite complémentaire Agirc-Arrco : ce qui change au 1er mars'],\n", " ['Elections européennes : quel est le bilan du Rassemblement national au Parlement européen ?'],\n", " ['En direct, guerre en Ukraine\\xa0: des milliers de personnes manifestent en France et à l’étranger pour soutenir le pays'],\n", " ['La France insoumise et les écologistes rattrapés par deux affaires #metoo'],\n", " [\"Marseille-Montpellier : doublé pour Aubameyang, l'OM s'envole... 
Suivez le match de Ligue 1\"],\n", " ['La DGSI enquête sur une tentative de déstabilisation des élections européennes par des prorusses en France'],\n", " ['Ramadan : la mort, les destructions et la famine éclipsent les préparatifs des réfugiés de Gaza'],\n", " ['\"Je n\\'imaginais pas me rhabiller et lui dire que je n\\'en avais plus envie\" : qu\\'est-ce que la \"dette sexuelle\", que 40% des Français déclarent avoir déjà ressentie ?'],\n", " ['Galles-France\\xa0: après un début de tournoi décevant, les Bleus se reprennent en marquant cinq essais à Cardiff'],\n", " ['Voici les 10 armées les plus grandes au monde en 2024'],\n", " ['Présidentielle américaine 2024\\xa0: Nikki Haley remporte à Washington sa première primaire républicaine devant Donald Trump'],\n", " ['Narcotrafic\\xa0: l’arrestation de Félix Bingui, épilogue d’une guerre des clans à Marseille'],\n", " ['César 2024 : triomphe pour \"Anatomie d\\'une chute\", discours poignant de Judith Godrèche, Monia Chokri crée la surprise... 
Ce qu\\'il faut retenir de cette 49e édition'],\n", " ['Neige et verglas : voici les 8 départements placés en vigilance jaune ce mardi'],\n", " ['En direct, guerre en Ukraine\\xa0: des milliers de personnes manifestent en France et à l’étranger pour soutenir le pays'],\n", " ['A Gaza, une distribution alimentaire vire à la tragédie\\xa0: ce que l’on sait sur le convoi humanitaire et les tirs de l’armée israélienne'],\n", " ['Guerre en Ukraine\\xa0: Emmanuel Macron a dit qu’il n’y avait «\\xa0aucune limite\\xa0» au soutien français à Kiev, selon les chefs de parti présents à l’Elysée'],\n", " [\"Guerre en Ukraine : l'Allemagne embarrassée par une «fuite très grave» de conversations secrètes entre hauts gradés de l'armée\"],\n", " ['\"On n\\'est pas comme lui\" : dans le Colorado, les républicains se déchirent autour de la candidature de Donald Trump à la présidentielle américaine'],\n", " ['«\\xa0Prix planchers\\xa0»\\xa0: la volte-face de Jordan Bardella trouble le RN'],\n", " [\"Polar\\xa0: le meilleur roman policier de ces 20 dernières années est suédois, selon l'avis de plus de 14.000 lecteurs\"],\n", " [\"Aya Nakamura, qui pourrait chanter Édith Piaf aux JO, ciblée par l'extrême droite\"],\n", " [\"Clermont-Marseille : Aubameyang redonne l'avantage à l'OM... 
Suivez le match de Ligue 1\"],\n", " ['En direct\\xa0: Paris accueillera lundi une réunion internationale de soutien à l’Ukraine'],\n", " [\"Salon de l'agriculture : Emmanuel Macron débat avec des représentants syndicaux dans un climat de forte tension\"],\n", " [\"Guerre au Proche-Orient : Joe Biden considère que Benjamin Nétanyahou doit faire plus pour l'aide à Gaza\"],\n", " ['\"C’est de loin la meilleure piscine de France !\" : dans les coulisses des premiers entraînements de l\\'équipe de France de plongeon au Centre aquatique olympique'],\n", " ['Deepfake de Joe Biden\\xa0: l’identité du commanditaire dévoilée'],\n", " ['Guerre en Ukraine : \"C\\'est la sécurité de l\\'Europe qui est en jeu\", affirme le général Jean-Paul Paloméros après les propos d\\'Emmanuel Macron'],\n", " ['\"Je n\\'imaginais pas me rhabiller et lui dire que je n\\'en avais plus envie\" : qu\\'est-ce que la \"dette sexuelle\", que 40% des Français déclarent avoir déjà ressentie ?'],\n", " ['Au lendemain des funérailles de Navalny, des centaines de\\xa0Russes continuent de\\xa0défiler devant sa\\xa0tombe'],\n", " ['\"Que se passera-t-il si la Russie gagne en Ukraine ?\" : sans attendre son adhésion à l\\'Otan, la Suède s\\'est déjà préparée à accueillir des militaires'],\n", " ['Escroquerie à la «\\xa0taxe carbone\\xa0»\\xa0: Marco Mouly, menacé d’incarcération, annonce partir en cavale'],\n", " ['En direct, guerre en Ukraine\\xa0: «\\xa0La société russe était divisée avant la guerre, elle l’est encore plus aujourd’hui\\xa0»'],\n", " ['Mort d’Akira Toriyama, créateur du manga culte «\\xa0Dragon Ball\\xa0»'],\n", " ['Irlande : victoire du \"non\" au référendum sur la place des femmes et la famille'],\n", " [\"Le corps de l'opposant russe Alexeï Navalny a été remis à sa mère\"],\n", " ['Les prix de l’immobilier en France ne s’ajustent plus, ils baissent'],\n", " ['Retraite complémentaire Agirc-Arrco : ce qui change au 1er mars'],\n", " ['Isabelle Saporta, PDG de Fayard, placée dans une 
situation intenable'],\n", " [\"Guerre en Ukraine : pourquoi l'Europe n'arrive-t-elle pas à produire assez de munitions pour Kiev ?\"],\n", " ['«\\xa0America is back\\xa0», le bilan économique reluisant de Joe\\xa0Biden'],\n", " ['Nouvelles règles pour Apple, Google, Facebook... Ce qui va changer avec l\\'entrée en vigueur du \"Digital Markets Act\" européen'],\n", " ['En direct, guerre en Ukraine\\xa0: les dernières sanctions américaines sont une ingérence «\\xa0cynique\\xa0», pour l’ambassadeur russe aux Etats-Unis'],\n", " ['Des soldats israéliens déployés à Gaza mettent en scène leurs exactions sur les réseaux sociaux'],\n", " [\"Guerre en Ukraine : pourquoi l'Europe n'arrive-t-elle pas à produire assez de munitions pour Kiev ?\"],\n", " [\"Polar\\xa0: le meilleur roman policier de ces 20 dernières années est suédois, selon l'avis de plus de 14.000 lecteurs\"],\n", " ['Météo : l\\'Hérault placé en vigilance orange pour risque de \"crues\", au total 10 départements de la moitié sud en alerte'],\n", " ['Guerre en Ukraine : cinq questions sur la plus importante salve de sanctions américaines contre la Russie'],\n", " ['La perchiste Margot Chevrier grièvement blessée aux Mondiaux d’athlétisme en salle'],\n", " ['Une avalanche à la station du Mont-Dore, dans le Puy-de-Dôme, fait quatre morts et trois blessés'],\n", " ['Envoi de troupes en Ukraine \"pas exclu\" par Emmanuel Macron : la question fait désormais partie des \"options\", selon un ancien colonel'],\n", " [\"Paris 2024 : l'affiche officielle des JO critiquée par des personnalités politiques de droite et d'extrême droite\"],\n", " ['Guerre en Ukraine : Gabriel Attal évoque \"les troupes de Vladimir Poutine\" en désignant Marine Le Pen'],\n", " ['\"On a beaucoup parlé de la scène de viol\" : comment le cinéma tente de mieux protéger les mineurs sur les tournages'],\n", " ['Au Salon de l’agriculture, Jordan Bardella répond aux attaques d’Emmanuel Macron\\xa0: «\\xa0C’est un homme seul qui gouverne contre les 
Français\\xa0»'],\n", " ['Coupes courtes 2024 : voici les 4 coiffures les plus tendances de l’année'],\n", " ['En direct, guerre en Ukraine\\xa0: l’Ukraine affirme avoir abattu un second avion radar russe A-50'],\n", " ['Dépression Monica : une dizaine de départements français en vigilance orange'],\n", " ['RSA sous conditions : Matignon dévoile la liste des 29\\xa0nouveaux départements concernés par le dispositif'],\n", " ['Après l’inscription de l’IVG dans la\\xa0Constitution, la\\xa0presse internationale salue un\\xa0vote «\\xa0historique\\xa0»'],\n", " [\"Cinq choses à savoir sur les grandes marées d'équinoxe, un phénomène exceptionnel attendu dans les prochains jours\"],\n", " ['Salon de l\\'agriculture : l\\'Elysée assure que les Soulèvements de la Terre n\\'ont été \"ni conviés ni contactés\" pour participer au débat avec Emmanuel Macron samedi'],\n", " ['En direct, guerre en Ukraine\\xa0: à Moscou, la tombe d’Alexeï Navalny continue d’être ensevelie sous les fleurs, deux jours après l’enterrement de l’opposant'],\n", " ['En direct, guerre en Ukraine\\xa0: les Etats-Unis ne pensent «\\xa0pas du tout\\xa0» que la Russie a pris un avantage militaire sur l’Ukraine'],\n", " ['IVG dans la Constitution\\xa0: le vote du Sénat ouvre la voie à l’adoption en Congrès le 4 mars, revivez les débats'],\n", " ['«\\xa0L’Allemagne et la France tirent deux conclusions totalement différentes de la guerre en Ukraine\\xa0»'],\n", " ['Voici les 10 armées les plus grandes au monde en 2024'],\n", " ['IVG\\xa0: une statue de Simone Veil vandalisée à La\\xa0Roche-sur-Yon, l’Action française revendique'],\n", " ['Joe Biden lance réellement sa campagne présidentielle par son discours offensif sur l’état de l’Union, creusant le contraste avec Donald Trump'],\n", " ['Climat : la température à la surface des océans a atteint un niveau record en février'],\n", " ['Paris : un adolescent de 16 ans frappé à coups de marteau par un groupe de jeunes'],\n", " ['Val-d’Oise : une joueuse de 19 ans 
victime d’un arrêt cardiaque en plein tournoi de basket'],\n", " ['IVG dans la Constitution\\xa0: le vote du Sénat ouvre la voie à l’adoption en Congrès le 4 mars, revivez les débats'],\n", " ['Plus de 70 armes à feu et plus de 3\\xa0000 munitions saisies chez Alain Delon'],\n", " ['Au Salon de l’agriculture, Emmanuel Macron creuse un peu plus le gouffre avec les défenseurs de l’environnement'],\n", " ['Dépression Monica : un homme a été retrouvé mort dans le fleuve Hérault, à Pézenas'],\n", " ['En direct, guerre en Ukraine\\xa0: selon le ministre des armées français, la Russie a menacé d’abattre des avions français en mer Noire il y a un mois'],\n", " [\"Salon de l'agriculture : Emmanuel Macron annule le débat prévu samedi avec les acteurs du secteur après le désistement des principaux syndicats\"],\n", " ['\"J\\'ai discuté et j\\'ai évolué\" : cinq sénateurs racontent pourquoi ils ont changé d\\'avis sur l\\'inscription de l\\'IVG dans la Constitution'],\n", " ['«\\xa0L’Europe est le continent qui se réchauffe le plus rapidement\\xa0» et doit agir beaucoup plus vite pour éviter des situations «\\xa0catastrophiques\\xa0»'],\n", " [\"Le vaccin contre le Covid-19 pourrait être à l'origine de certains problèmes de santé, selon une étude\"],\n", " [\"Le meurtrier du policier Eric Masson, Ilias Akoudad, est condamné à 30\\xa0ans de prison assortie d'une peine de 20 ans de sûreté\"],\n", " ['Au Salon de l’agriculture, Emmanuel Macron creuse un peu plus le gouffre avec les défenseurs de l’environnement'],\n", " [\"Sécurité sociale : les associations de patients souffrant d'affections longue durée s'inquiètent du projet de réforme de leur prise en charge\"],\n", " ['Guerre en Ukraine\\xa0: des milices russes ont attaqué leur propre pays'],\n", " ['La Mairie de Paris perquisitionnée dans l’affaire du voyage d’Anne Hidalgo à Tahiti'],\n", " ['En direct, guerre en Ukraine\\xa0: plus de 14\\xa0millions de personnes ont fui le pays depuis le début de la guerre, selon 
l’ONU'],\n", " [\"Guerre en Ukraine : Vladimir Poutine, conforté par les avancées russes sur le terrain, prononce aujourd'hui son discours à la nation\"],\n", " ['Elections européennes : comment Jordan Bardella, la tête de liste du Rassemblement national, a distancé ses concurrents avant le début du match'],\n", " ['Alexeï Navalny enterré à Moscou, en présence de milliers de ses partisans'],\n", " ['La députée LFI Ersilia Soudais porte plainte pour viol, son conjoint en garde à vue'],\n", " ['Gabriel Attal dénonce le \"cirque médiatique\" de Jordan Bardella au Salon de l\\'agriculture'],\n", " [\"Vol MH370 : où en sont les recherches, dix ans après la disparition de l'avion de la Malaysia Airlines ?\"],\n", " ['Au Parlement européen, les soutiens de Vladimir Poutine se rassemblent autour du RN'],\n", " ['\"Anatomie d\\'une chute\" et \"Le Règne animal\" vont-ils tout rafler ? Suivez la 49e cérémonie des César'],\n", " ['Espace : pourquoi est-ce si difficile de se poser sur la Lune ?'],\n", " ['En direct, au Salon de l’agriculture\\xa0: Emmanuel Macron a quitté la porte de Versailles après treize heures de visite'],\n", " ['En direct, guerre en Ukraine\\xa0: les Etats Unis dévoilent des chefs d’accusation contre plusieurs oligarques russes'],\n", " ['\"On est les dindons de la farce\" : cinq agriculteurs nous ouvrent leurs livres de comptes'],\n", " ['Joe Biden lance réellement sa campagne présidentielle par son discours offensif sur l’état de l’Union, creusant le contraste avec Donald Trump'],\n", " ['«\\xa0Prix planchers\\xa0»\\xa0: la volte-face de Jordan Bardella trouble le RN'],\n", " ['Pompéi\\xa0: de splendides fresques découvertes lors de travaux de restauration et de fouilles'],\n", " [\"Colère des agriculteurs : comment l'incitation à réhabiliter les haies divise le monde rural\"],\n", " ['Escroquerie à la «\\xa0taxe carbone\\xa0»\\xa0: Marco Mouly, menacé d’incarcération, annonce partir en cavale'],\n", " ['Intempéries : deux nouveaux corps sans vie 
retrouvés à Goudargues dans le Gard, portant le bilan provisoire à trois morts'],\n", " ['En direct, au Salon de l’agriculture\\xa0: Emmanuel Macron a quitté la porte de Versailles après treize heures de visite'],\n", " ['Real Sociedad-PSG\\xa0: revivez le doublé de Kylian Mbappé et la qualification de Paris pour les quarts de finale de la Ligue des champions'],\n", " ['\"Que se passera-t-il si la Russie gagne en Ukraine ?\" : sans attendre son adhésion à l\\'Otan, la Suède s\\'est déjà préparée à accueillir des militaires'],\n", " [\"Guerre en Ukraine : l'ex-commandant en chef ukrainien nommé ambassadeur au Royaume-Uni\"],\n", " ['\"Salopard cinglé\" : Joe Biden s\\'emporte contre Vladimir Poutine lors d\\'une rencontre avec des donateurs'],\n", " ['En direct, guerre en Ukraine\\xa0: l’armée ukrainienne dément la prise de la ville de Krynky, dans la région de Kherson, par l’armée russe'],\n", " ['\"Chine, opérations secrètes\" : comment un ex-agent des services secrets français a trahi son pays au profit de Pékin'],\n", " ['Fin de vie : \"L\\'Etat ne doit en aucune manière se donner l\\'autorisation de tuer quelqu\\'un\", selon Philippe Juvin'],\n", " [\"Alix Heurtaut : «Ce ne sont pas les associations écologistes qui nous tapent dessus toute l'année qui vont permettre de nourrir les Français»\"],\n", " [\"Pyrénées : les images impressionnantes d'un ours qui s'est attaqué à un sanglier au bord d'une route (vidéo)\"],\n", " ['Coupes courtes 2024 : voici les 4 coiffures les plus tendances de l’année'],\n", " ['Narcotrafic\\xa0: l’arrestation de Félix Bingui, épilogue d’une guerre des clans à Marseille'],\n", " ['En direct, guerre en Ukraine\\xa0: l’Union européenne s’entend sur un nouveau paquet de sanctions contre la Russie'],\n", " ['Le ministre des Armées alerte sur des menaces russes contre des patrouilles françaises'],\n", " ['Emmanuel Macron persiste sur l’envoi éventuel de militaires en Ukraine, la plupart des alliés occidentaux prennent leurs 
distances'],\n", " ['\"Les garçons prennent trop d\\'espace\" : comment les écoles réaménagent les cours de récréation pour réduire les inégalités de genre'],\n", " ['Europe : l’inquiétude d’une ingérence russe'],\n", " ['Pour sauver leur couple, ils ont choisi l’amour libre\\xa0: «\\xa0Nous voulions expérimenter, sans nous quitter\\xa0»'],\n", " ['Amazon met en vente sur son site des maisons à 20.000 euros, livrées en moins de 3 semaines'],\n", " ['Guerre en Ukraine : Emmanuel Macron affirme que tous ses mots sont \"pesés\" et \"mesurés\"'],\n", " ['Armée : qu’est ce que le filet de camouflage multispectral, dont la France commence à s’équiper ?'],\n", " [\"Puy-de-Dôme : les quatre victimes de l'avalanche à la station du Mont-Dore sont des alpinistes français\"],\n", " ['Ce que révèle le registre des cadeaux offerts aux députés\\xa0: du champagne, des matchs de football et une «\\xa0nuit au château\\xa0»'],\n", " ['Guerre en Ukraine\\xa0: l’Allemagne confirme qu’une conversation secrète entre des officiers a été «\\xa0interceptée\\xa0» après des fuites sur des comptes proches du Kremlin'],\n", " ['En direct, guerre en Ukraine\\xa0: Volodymyr Zelensky appelle à l’unité entre la Pologne et l’Ukraine, au lendemain d’une mobilisation d’agriculteurs polonais'],\n", " ['Emmanuel Macron persiste sur l’envoi éventuel de militaires en Ukraine, la plupart des alliés occidentaux prennent leurs distances'],\n", " ['La Russie avait les plans de la contre-offensive ukrainienne, affirme Volodymyr Zelensky'],\n", " ['«\\xa0Un bowling\\xa0? C’est 40 euros. 
Soit tu paies, soit tu dis en permanence non à tes enfants\\xa0»\\xa0: les vacances par temps d’inflation\\xa0à Center Parcs'],\n", " ['Guerre entre Israël et le Hamas : pourquoi le début du ramadan fait craindre une nouvelle escalade de violence'],\n", " ['L’ONU considère que les colonies israéliennes relèvent du crime de guerre'],\n", " ['Qui est Damien Cassé, le conjoint de la députée LFI Ersilia Soudais, accusé de viol par l’élue ?'],\n", " ['Au lendemain des funérailles de Navalny, des centaines de\\xa0Russes continuent de\\xa0défiler devant sa\\xa0tombe'],\n", " ['Imam accusé d\\'\"appels à la haine\" : Mahjoub Mahjoubi expulsé vers la Tunisie'],\n", " ['Panthéonisation de Manouchian : \"Quand on propose d\\'expulser tous les immigrés, on n\\'a rien à faire au Panthéon\", lance au RN le fils de Joseph Epstein, fusillé en 1944'],\n", " ['«\\xa0Anatomie d’une chute\\xa0» poursuit son incroyable parcours avec l’Oscar du meilleur scénario original'],\n", " ['Guerre en Ukraine : cinq questions sur la plus importante salve de sanctions américaines contre la Russie'],\n", " ['En direct, guerre en Ukraine\\xa0: les Etats de l’Union européenne proposent de reconduire les mécanismes d’encadrement des importations d’Ukraine'],\n", " ['Paris 2024 : on a testé \"Anticiper les Jeux\", le site qui promet d\\'éviter la \"grande galère\" des transports en Île-de-France'],\n", " ['Guerre en Ukraine : comment la Russie utilise les armes fournies à Kiev pour torpiller le soutien des pays occidentaux'],\n", " ['\"On est entièrement d\\'accord\" : au Salon de l\\'agriculture, Les Ecologistes veulent faire oublier le désamour à leur égard'],\n", " ['Emmanuel Macron annule le débat prévu pour l’ouverture du Salon de l’agriculture après le refus de la FNSEA d’y participer'],\n", " ['Guerre en Ukraine : Washington juge \"irresponsables\" les propos de Vladimir Poutine sur la \"menace réelle\" de guerre nucléaire'],\n", " ['Au Parlement européen, les soutiens de Vladimir Poutine se 
rassemblent autour du RN'],\n", " [\"IVG : pourquoi le Sénat pourrait retarder l'inscription du droit à l'avortement dans la Constitution\"],\n", " ['Soins palliatifs : à Cahuzac-sur-Vère, dans le Tarn, une colocation innovante pour finir ses jours \"comme à la maison\"'],\n", " ['Alexeï Navalny\\xa0: ce que l’on sait trois jours après la mort en prison de l’opposant russe'],\n", " ['Le cas extrême d’une femme rappelle l’accès déplorable aux soins dentaires au Royaume-Uni'],\n", " [\"Guerre Israël-Hamas : cinq personnes tuées par un largage d'aide humanitaire sur Gaza\"],\n", " ['\"On est entièrement d\\'accord\" : au Salon de l\\'agriculture, Les Ecologistes veulent faire oublier le désamour à leur égard'],\n", " ...]" ] }, "execution_count": 5, "metadata": {}, "output_type": "execute_result" } ], "source": [ "phrases" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "entity_recognizer_md download started this may take some time.\n", "Approx size to download 440.6 MB\n", "[ / ]entity_recognizer_md download started this may take some time.\n", "Approximate size to download 440.6 MB\n", "[ — ]Download done! 
Loading the resource.\n", "[ \\ ]" ] }, { "name": "stderr", "output_type": "stream", "text": [ "WARNING: An illegal reflective access operation has occurred\n", "WARNING: Illegal reflective access by org.apache.spark.util.SizeEstimator$ (file:/home/theenglishway/Documents/dev/python/le_monde/.venv/lib/python3.11/site-packages/pyspark/jars/spark-core_2.12-3.5.1.jar) to field java.util.regex.Pattern.pattern\n", "WARNING: Please consider reporting this to the maintainers of org.apache.spark.util.SizeEstimator$\n", "WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations\n", "WARNING: All illegal access operations will be denied in a future release\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "[ — ]" ] }, { "name": "stderr", "output_type": "stream", "text": [ "2024-03-27 18:34:05.001850: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:1' assigned_device_name_='' resource_device_name_='/device:GPU:1' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "GatherV2: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Cast: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " char_repr_cnn/char_embeddings/Initializer/random_uniform/shape (Const) \n", " char_repr_cnn/char_embeddings/Initializer/random_uniform/min (Const) \n", " char_repr_cnn/char_embeddings/Initializer/random_uniform/max (Const) \n", " char_repr_cnn/char_embeddings/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " char_repr_cnn/char_embeddings/Initializer/random_uniform/sub (Sub) \n", " char_repr_cnn/char_embeddings/Initializer/random_uniform/mul (Mul) \n", " char_repr_cnn/char_embeddings/Initializer/random_uniform (Add) \n", " char_repr_cnn/char_embeddings (VariableV2) /device:GPU:1\n", " char_repr_cnn/char_embeddings/Assign (Assign) /device:GPU:1\n", " char_repr_cnn/char_embeddings/read (Identity) /device:GPU:1\n", " char_repr_cnn/embedding_lookup/axis (Const) /device:GPU:1\n", " char_repr_cnn/embedding_lookup (GatherV2) /device:GPU:1\n", " training_1/gradients/char_repr_cnn/embedding_lookup_grad/Shape (Const) /device:GPU:1\n", " training_1/gradients/char_repr_cnn/embedding_lookup_grad/ToInt32 (Cast) /device:GPU:1\n", " training/char_repr_cnn/char_embeddings/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:1\n", " training/char_repr_cnn/char_embeddings/Adam/Initializer/zeros/Const (Const) 
/device:GPU:1\n", " training/char_repr_cnn/char_embeddings/Adam/Initializer/zeros (Fill) /device:GPU:1\n", " training/char_repr_cnn/char_embeddings/Adam (VariableV2) /device:GPU:1\n", " training/char_repr_cnn/char_embeddings/Adam/Assign (Assign) /device:GPU:1\n", " training/char_repr_cnn/char_embeddings/Adam/read (Identity) /device:GPU:1\n", " training/char_repr_cnn/char_embeddings/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:1\n", " training/char_repr_cnn/char_embeddings/Adam_1/Initializer/zeros/Const (Const) /device:GPU:1\n", " training/char_repr_cnn/char_embeddings/Adam_1/Initializer/zeros (Fill) /device:GPU:1\n", " training/char_repr_cnn/char_embeddings/Adam_1 (VariableV2) /device:GPU:1\n", " training/char_repr_cnn/char_embeddings/Adam_1/Assign (Assign) /device:GPU:1\n", " training/char_repr_cnn/char_embeddings/Adam_1/read (Identity) /device:GPU:1\n", " training_1/Adam/update_char_repr_cnn/char_embeddings/ApplyAdam (ApplyAdam) /device:GPU:1\n", " save/Assign_2 (Assign) /device:GPU:1\n", " save/Assign_47 (Assign) /device:GPU:1\n", " save/Assign_48 (Assign) /device:GPU:1\n", "\n", "2024-03-27 18:34:05.002277: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:1' assigned_device_name_='' resource_device_name_='/device:GPU:1' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " char_repr_cnn/conv1d/kernel/Initializer/random_uniform/shape (Const) \n", " char_repr_cnn/conv1d/kernel/Initializer/random_uniform/min (Const) \n", " char_repr_cnn/conv1d/kernel/Initializer/random_uniform/max (Const) \n", " char_repr_cnn/conv1d/kernel/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " char_repr_cnn/conv1d/kernel/Initializer/random_uniform/sub (Sub) \n", " char_repr_cnn/conv1d/kernel/Initializer/random_uniform/mul (Mul) \n", " char_repr_cnn/conv1d/kernel/Initializer/random_uniform (Add) \n", " char_repr_cnn/conv1d/kernel (VariableV2) /device:GPU:1\n", " char_repr_cnn/conv1d/kernel/Assign (Assign) /device:GPU:1\n", " char_repr_cnn/conv1d/kernel/read (Identity) /device:GPU:1\n", " training/char_repr_cnn/conv1d/kernel/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:1\n", " training/char_repr_cnn/conv1d/kernel/Adam/Initializer/zeros/Const (Const) /device:GPU:1\n", " training/char_repr_cnn/conv1d/kernel/Adam/Initializer/zeros (Fill) /device:GPU:1\n", " training/char_repr_cnn/conv1d/kernel/Adam (VariableV2) /device:GPU:1\n", " training/char_repr_cnn/conv1d/kernel/Adam/Assign (Assign) /device:GPU:1\n", " training/char_repr_cnn/conv1d/kernel/Adam/read (Identity) /device:GPU:1\n", " 
training/char_repr_cnn/conv1d/kernel/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:1\n", " training/char_repr_cnn/conv1d/kernel/Adam_1/Initializer/zeros/Const (Const) /device:GPU:1\n", " training/char_repr_cnn/conv1d/kernel/Adam_1/Initializer/zeros (Fill) /device:GPU:1\n", " training/char_repr_cnn/conv1d/kernel/Adam_1 (VariableV2) /device:GPU:1\n", " training/char_repr_cnn/conv1d/kernel/Adam_1/Assign (Assign) /device:GPU:1\n", " training/char_repr_cnn/conv1d/kernel/Adam_1/read (Identity) /device:GPU:1\n", " training_1/Adam/update_char_repr_cnn/conv1d/kernel/ApplyAdam (ApplyAdam) /device:GPU:1\n", " save/Assign_4 (Assign) /device:GPU:1\n", " save/Assign_51 (Assign) /device:GPU:1\n", " save/Assign_52 (Assign) /device:GPU:1\n", "\n", "2024-03-27 18:34:05.002570: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:1' assigned_device_name_='' resource_device_name_='/device:GPU:1' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Identity: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " char_repr_cnn/conv1d/bias/Initializer/zeros (Const) \n", " char_repr_cnn/conv1d/bias (VariableV2) /device:GPU:1\n", " char_repr_cnn/conv1d/bias/Assign (Assign) /device:GPU:1\n", " char_repr_cnn/conv1d/bias/read (Identity) /device:GPU:1\n", " training/char_repr_cnn/conv1d/bias/Adam/Initializer/zeros (Const) /device:GPU:1\n", " training/char_repr_cnn/conv1d/bias/Adam (VariableV2) /device:GPU:1\n", " training/char_repr_cnn/conv1d/bias/Adam/Assign (Assign) /device:GPU:1\n", " training/char_repr_cnn/conv1d/bias/Adam/read (Identity) /device:GPU:1\n", " training/char_repr_cnn/conv1d/bias/Adam_1/Initializer/zeros (Const) /device:GPU:1\n", " training/char_repr_cnn/conv1d/bias/Adam_1 (VariableV2) /device:GPU:1\n", " training/char_repr_cnn/conv1d/bias/Adam_1/Assign (Assign) /device:GPU:1\n", " training/char_repr_cnn/conv1d/bias/Adam_1/read (Identity) /device:GPU:1\n", " training_1/Adam/update_char_repr_cnn/conv1d/bias/ApplyAdam (ApplyAdam) /device:GPU:1\n", " save/Assign_3 (Assign) /device:GPU:1\n", " save/Assign_49 (Assign) /device:GPU:1\n", " save/Assign_50 (Assign) /device:GPU:1\n", "\n", "2024-03-27 18:34:05.002930: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. 
Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "GatherV2: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Cast: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " char_repr_lstm/char_embeddings/Initializer/random_uniform/shape (Const) \n", " char_repr_lstm/char_embeddings/Initializer/random_uniform/min (Const) \n", " char_repr_lstm/char_embeddings/Initializer/random_uniform/max (Const) \n", " char_repr_lstm/char_embeddings/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " char_repr_lstm/char_embeddings/Initializer/random_uniform/sub (Sub) \n", " char_repr_lstm/char_embeddings/Initializer/random_uniform/mul (Mul) \n", " char_repr_lstm/char_embeddings/Initializer/random_uniform (Add) \n", " char_repr_lstm/char_embeddings (VariableV2) /device:GPU:0\n", " char_repr_lstm/char_embeddings/Assign (Assign) /device:GPU:0\n", " char_repr_lstm/char_embeddings/read (Identity) /device:GPU:0\n", " char_repr_lstm/embedding_lookup/axis (Const) /device:GPU:0\n", " char_repr_lstm/embedding_lookup (GatherV2) /device:GPU:0\n", " training_1/gradients/char_repr_lstm/embedding_lookup_grad/Shape (Const) /device:GPU:0\n", " training_1/gradients/char_repr_lstm/embedding_lookup_grad/ToInt32 (Cast) /device:GPU:0\n", " 
training/char_repr_lstm/char_embeddings/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/char_repr_lstm/char_embeddings/Adam/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/char_repr_lstm/char_embeddings/Adam/Initializer/zeros (Fill) /device:GPU:0\n", " training/char_repr_lstm/char_embeddings/Adam (VariableV2) /device:GPU:0\n", " training/char_repr_lstm/char_embeddings/Adam/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/char_embeddings/Adam/read (Identity) /device:GPU:0\n", " training/char_repr_lstm/char_embeddings/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/char_repr_lstm/char_embeddings/Adam_1/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/char_repr_lstm/char_embeddings/Adam_1/Initializer/zeros (Fill) /device:GPU:0\n", " training/char_repr_lstm/char_embeddings/Adam_1 (VariableV2) /device:GPU:0\n", " training/char_repr_lstm/char_embeddings/Adam_1/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/char_embeddings/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_char_repr_lstm/char_embeddings/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_9 (Assign) /device:GPU:0\n", " save/Assign_61 (Assign) /device:GPU:0\n", " save/Assign_62 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.003362: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Initializer/random_uniform/shape (Const) \n", " char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Initializer/random_uniform/min (Const) \n", " char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Initializer/random_uniform/max (Const) \n", " char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Initializer/random_uniform/sub (Sub) \n", " char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Initializer/random_uniform/mul (Mul) \n", " char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Initializer/random_uniform (Add) \n", " char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel (VariableV2) /device:GPU:0\n", " char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Adam/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Adam/Initializer/zeros (Fill) /device:GPU:0\n", " 
training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Adam (VariableV2) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Adam/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Adam/read (Identity) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Adam_1/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Adam_1/Initializer/zeros (Fill) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Adam_1 (VariableV2) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Adam_1/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_char_repr_lstm/bidirectional_rnn/fw/lstm_cell/kernel/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_8 (Assign) /device:GPU:0\n", " save/Assign_59 (Assign) /device:GPU:0\n", " save/Assign_60 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.003628: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Identity: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " char_repr_lstm/bidirectional_rnn/fw/lstm_cell/bias/Initializer/zeros (Const) \n", " char_repr_lstm/bidirectional_rnn/fw/lstm_cell/bias (VariableV2) /device:GPU:0\n", " char_repr_lstm/bidirectional_rnn/fw/lstm_cell/bias/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/bias/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/bias/Adam (VariableV2) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/bias/Adam/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/bias/Adam/read (Identity) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/bias/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/bias/Adam_1 (VariableV2) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/bias/Adam_1/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/fw/lstm_cell/bias/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_char_repr_lstm/bidirectional_rnn/fw/lstm_cell/bias/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_7 (Assign) /device:GPU:0\n", " save/Assign_57 (Assign) /device:GPU:0\n", " save/Assign_58 (Assign) /device:GPU:0\n", "\n", "2024-03-27 
18:34:05.004121: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Initializer/random_uniform/shape (Const) \n", " char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Initializer/random_uniform/min (Const) \n", " char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Initializer/random_uniform/max (Const) \n", " char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Initializer/random_uniform/sub (Sub) \n", " char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Initializer/random_uniform/mul (Mul) \n", " char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Initializer/random_uniform (Add) \n", " char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel (VariableV2) /device:GPU:0\n", " char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Assign (Assign) /device:GPU:0\n", " 
training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Adam/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Adam/Initializer/zeros (Fill) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Adam (VariableV2) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Adam/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Adam/read (Identity) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Adam_1/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Adam_1/Initializer/zeros (Fill) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Adam_1 (VariableV2) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Adam_1/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_char_repr_lstm/bidirectional_rnn/bw/lstm_cell/kernel/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_6 (Assign) /device:GPU:0\n", " save/Assign_55 (Assign) /device:GPU:0\n", " save/Assign_56 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.004395: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Identity: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " char_repr_lstm/bidirectional_rnn/bw/lstm_cell/bias/Initializer/zeros (Const) \n", " char_repr_lstm/bidirectional_rnn/bw/lstm_cell/bias (VariableV2) /device:GPU:0\n", " char_repr_lstm/bidirectional_rnn/bw/lstm_cell/bias/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/bias/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/bias/Adam (VariableV2) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/bias/Adam/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/bias/Adam/read (Identity) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/bias/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/bias/Adam_1 (VariableV2) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/bias/Adam_1/Assign (Assign) /device:GPU:0\n", " training/char_repr_lstm/bidirectional_rnn/bw/lstm_cell/bias/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_char_repr_lstm/bidirectional_rnn/bw/lstm_cell/bias/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_5 (Assign) /device:GPU:0\n", " save/Assign_53 (Assign) /device:GPU:0\n", " save/Assign_54 (Assign) /device:GPU:0\n", "\n", "2024-03-27 
18:34:05.004898: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " W/Initializer/random_uniform/shape (Const) \n", " W/Initializer/random_uniform/min (Const) \n", " W/Initializer/random_uniform/max (Const) \n", " W/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " W/Initializer/random_uniform/sub (Sub) \n", " W/Initializer/random_uniform/mul (Mul) \n", " W/Initializer/random_uniform (Add) \n", " W (VariableV2) /device:GPU:0\n", " W/Assign (Assign) /device:GPU:0\n", " W/read (Identity) /device:GPU:0\n", " training_1/beta1_power/initial_value (Const) /device:GPU:0\n", " training_1/beta1_power (VariableV2) /device:GPU:0\n", " training_1/beta1_power/Assign (Assign) /device:GPU:0\n", " training_1/beta1_power/read (Identity) /device:GPU:0\n", " training_1/beta2_power/initial_value (Const) /device:GPU:0\n", " training_1/beta2_power (VariableV2) /device:GPU:0\n", " training_1/beta2_power/Assign (Assign) /device:GPU:0\n", " training_1/beta2_power/read (Identity) /device:GPU:0\n", " 
training/W/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/W/Adam/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/W/Adam/Initializer/zeros (Fill) /device:GPU:0\n", " training/W/Adam (VariableV2) /device:GPU:0\n", " training/W/Adam/Assign (Assign) /device:GPU:0\n", " training/W/Adam/read (Identity) /device:GPU:0\n", " training/W/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/W/Adam_1/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/W/Adam_1/Initializer/zeros (Fill) /device:GPU:0\n", " training/W/Adam_1 (VariableV2) /device:GPU:0\n", " training/W/Adam_1/Assign (Assign) /device:GPU:0\n", " training/W/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_W/ApplyAdam (ApplyAdam) /device:GPU:0\n", " training_1/Adam/mul (Mul) /device:GPU:0\n", " training_1/Adam/Assign (Assign) /device:GPU:0\n", " training_1/Adam/mul_1 (Mul) /device:GPU:0\n", " training_1/Adam/Assign_1 (Assign) /device:GPU:0\n", " save/Assign (Assign) /device:GPU:0\n", " save/Assign_43 (Assign) /device:GPU:0\n", " save/Assign_44 (Assign) /device:GPU:0\n", " save/Assign_129 (Assign) /device:GPU:0\n", " save/Assign_130 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.005236: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " b/Initializer/random_uniform/shape (Const) \n", " b/Initializer/random_uniform/min (Const) \n", " b/Initializer/random_uniform/max (Const) \n", " b/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " b/Initializer/random_uniform/sub (Sub) \n", " b/Initializer/random_uniform/mul (Mul) \n", " b/Initializer/random_uniform (Add) \n", " b (VariableV2) /device:GPU:0\n", " b/Assign (Assign) /device:GPU:0\n", " b/read (Identity) /device:GPU:0\n", " training/b/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/b/Adam (VariableV2) /device:GPU:0\n", " training/b/Adam/Assign (Assign) /device:GPU:0\n", " training/b/Adam/read (Identity) /device:GPU:0\n", " training/b/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/b/Adam_1 (VariableV2) /device:GPU:0\n", " training/b/Adam_1/Assign (Assign) /device:GPU:0\n", " training/b/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_b/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_1 (Assign) /device:GPU:0\n", " save/Assign_45 (Assign) /device:GPU:0\n", " save/Assign_46 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.005597: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. 
Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-0/lstm_fused_cell/kernel/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-0/lstm_fused_cell/kernel/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-0/lstm_fused_cell/kernel/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-0/lstm_fused_cell/kernel/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-0/lstm_fused_cell/kernel/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-0/lstm_fused_cell/kernel/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-0/lstm_fused_cell/kernel/Initializer/random_uniform (Add) \n", " context_repr/lstm-0/lstm_fused_cell/kernel (VariableV2) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell/kernel/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell/kernel/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/kernel/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/kernel/Adam/Initializer/zeros/Const (Const) /device:GPU:0\n", " 
training/context_repr/lstm-0/lstm_fused_cell/kernel/Adam/Initializer/zeros (Fill) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/kernel/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/kernel/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/kernel/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/kernel/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/kernel/Adam_1/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/kernel/Adam_1/Initializer/zeros (Fill) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/kernel/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/kernel/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/kernel/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-0/lstm_fused_cell/kernel/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_13 (Assign) /device:GPU:0\n", " save/Assign_69 (Assign) /device:GPU:0\n", " save/Assign_70 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.005816: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Identity: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-0/lstm_fused_cell/bias/Initializer/Const (Const) \n", " context_repr/lstm-0/lstm_fused_cell/bias (VariableV2) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell/bias/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell/bias/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/bias/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/bias/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/bias/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/bias/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/bias/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/bias/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/bias/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/bias/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-0/lstm_fused_cell/bias/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_12 (Assign) /device:GPU:0\n", " save/Assign_67 (Assign) /device:GPU:0\n", " save/Assign_68 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.006002: W 
external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-0/lstm_fused_cell/w_i_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-0/lstm_fused_cell/w_i_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-0/lstm_fused_cell/w_i_diag/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-0/lstm_fused_cell/w_i_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-0/lstm_fused_cell/w_i_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-0/lstm_fused_cell/w_i_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-0/lstm_fused_cell/w_i_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-0/lstm_fused_cell/w_i_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell/w_i_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell/w_i_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_i_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " 
training/context_repr/lstm-0/lstm_fused_cell/w_i_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_i_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_i_diag/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_i_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_i_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_i_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_i_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-0/lstm_fused_cell/w_i_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_15 (Assign) /device:GPU:0\n", " save/Assign_73 (Assign) /device:GPU:0\n", " save/Assign_74 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.006177: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-0/lstm_fused_cell/w_f_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-0/lstm_fused_cell/w_f_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-0/lstm_fused_cell/w_f_diag/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-0/lstm_fused_cell/w_f_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-0/lstm_fused_cell/w_f_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-0/lstm_fused_cell/w_f_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-0/lstm_fused_cell/w_f_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-0/lstm_fused_cell/w_f_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell/w_f_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell/w_f_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_f_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_f_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_f_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_f_diag/Adam/read (Identity) /device:GPU:0\n", " 
training/context_repr/lstm-0/lstm_fused_cell/w_f_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_f_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_f_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_f_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-0/lstm_fused_cell/w_f_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_14 (Assign) /device:GPU:0\n", " save/Assign_71 (Assign) /device:GPU:0\n", " save/Assign_72 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.006338: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-0/lstm_fused_cell/w_o_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-0/lstm_fused_cell/w_o_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-0/lstm_fused_cell/w_o_diag/Initializer/random_uniform/max (Const) \n", " 
context_repr/lstm-0/lstm_fused_cell/w_o_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-0/lstm_fused_cell/w_o_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-0/lstm_fused_cell/w_o_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-0/lstm_fused_cell/w_o_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-0/lstm_fused_cell/w_o_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell/w_o_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell/w_o_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_o_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_o_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_o_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_o_diag/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_o_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_o_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_o_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell/w_o_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-0/lstm_fused_cell/w_o_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_16 (Assign) /device:GPU:0\n", " save/Assign_75 (Assign) /device:GPU:0\n", " save/Assign_76 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.006563: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-0/lstm_fused_cell_1/kernel/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-0/lstm_fused_cell_1/kernel/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-0/lstm_fused_cell_1/kernel/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-0/lstm_fused_cell_1/kernel/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-0/lstm_fused_cell_1/kernel/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-0/lstm_fused_cell_1/kernel/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-0/lstm_fused_cell_1/kernel/Initializer/random_uniform (Add) \n", " context_repr/lstm-0/lstm_fused_cell_1/kernel (VariableV2) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell_1/kernel/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell_1/kernel/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/kernel/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/kernel/Adam/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/kernel/Adam/Initializer/zeros (Fill) /device:GPU:0\n", " 
training/context_repr/lstm-0/lstm_fused_cell_1/kernel/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/kernel/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/kernel/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/kernel/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/kernel/Adam_1/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/kernel/Adam_1/Initializer/zeros (Fill) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/kernel/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/kernel/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/kernel/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-0/lstm_fused_cell_1/kernel/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_18 (Assign) /device:GPU:0\n", " save/Assign_79 (Assign) /device:GPU:0\n", " save/Assign_80 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.006712: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Identity: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-0/lstm_fused_cell_1/bias/Initializer/Const (Const) \n", " context_repr/lstm-0/lstm_fused_cell_1/bias (VariableV2) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell_1/bias/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell_1/bias/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/bias/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/bias/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/bias/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/bias/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/bias/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/bias/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/bias/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/bias/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-0/lstm_fused_cell_1/bias/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_17 (Assign) /device:GPU:0\n", " save/Assign_77 (Assign) /device:GPU:0\n", " save/Assign_78 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.006948: W 
external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_i_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Adam/Initializer/zeros (Const) 
/device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-0/lstm_fused_cell_1/w_i_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_20 (Assign) /device:GPU:0\n", " save/Assign_83 (Assign) /device:GPU:0\n", " save/Assign_84 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.007181: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_f_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Adam/read (Identity) 
/device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-0/lstm_fused_cell_1/w_f_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_19 (Assign) /device:GPU:0\n", " save/Assign_81 (Assign) /device:GPU:0\n", " save/Assign_82 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.007349: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/max (Const) \n", " 
context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-0/lstm_fused_cell_1/w_o_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-0/lstm_fused_cell_1/w_o_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_21 (Assign) /device:GPU:0\n", " save/Assign_85 (Assign) /device:GPU:0\n", " save/Assign_86 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.007561: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-1/lstm_fused_cell/kernel/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-1/lstm_fused_cell/kernel/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-1/lstm_fused_cell/kernel/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-1/lstm_fused_cell/kernel/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-1/lstm_fused_cell/kernel/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-1/lstm_fused_cell/kernel/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-1/lstm_fused_cell/kernel/Initializer/random_uniform (Add) \n", " context_repr/lstm-1/lstm_fused_cell/kernel (VariableV2) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell/kernel/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell/kernel/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/kernel/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/kernel/Adam/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/kernel/Adam/Initializer/zeros (Fill) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/kernel/Adam (VariableV2) 
/device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/kernel/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/kernel/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/kernel/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/kernel/Adam_1/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/kernel/Adam_1/Initializer/zeros (Fill) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/kernel/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/kernel/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/kernel/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-1/lstm_fused_cell/kernel/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_23 (Assign) /device:GPU:0\n", " save/Assign_89 (Assign) /device:GPU:0\n", " save/Assign_90 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.007703: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Identity: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-1/lstm_fused_cell/bias/Initializer/Const (Const) \n", " context_repr/lstm-1/lstm_fused_cell/bias (VariableV2) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell/bias/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell/bias/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/bias/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/bias/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/bias/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/bias/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/bias/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/bias/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/bias/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/bias/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-1/lstm_fused_cell/bias/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_22 (Assign) /device:GPU:0\n", " save/Assign_87 (Assign) /device:GPU:0\n", " save/Assign_88 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.007850: W 
external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-1/lstm_fused_cell/w_i_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-1/lstm_fused_cell/w_i_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-1/lstm_fused_cell/w_i_diag/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-1/lstm_fused_cell/w_i_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-1/lstm_fused_cell/w_i_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-1/lstm_fused_cell/w_i_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-1/lstm_fused_cell/w_i_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-1/lstm_fused_cell/w_i_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell/w_i_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell/w_i_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_i_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " 
training/context_repr/lstm-1/lstm_fused_cell/w_i_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_i_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_i_diag/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_i_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_i_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_i_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_i_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-1/lstm_fused_cell/w_i_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_25 (Assign) /device:GPU:0\n", " save/Assign_93 (Assign) /device:GPU:0\n", " save/Assign_94 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.007999: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-1/lstm_fused_cell/w_f_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-1/lstm_fused_cell/w_f_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-1/lstm_fused_cell/w_f_diag/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-1/lstm_fused_cell/w_f_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-1/lstm_fused_cell/w_f_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-1/lstm_fused_cell/w_f_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-1/lstm_fused_cell/w_f_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-1/lstm_fused_cell/w_f_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell/w_f_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell/w_f_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_f_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_f_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_f_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_f_diag/Adam/read (Identity) /device:GPU:0\n", " 
training/context_repr/lstm-1/lstm_fused_cell/w_f_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_f_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_f_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_f_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-1/lstm_fused_cell/w_f_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_24 (Assign) /device:GPU:0\n", " save/Assign_91 (Assign) /device:GPU:0\n", " save/Assign_92 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.008149: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-1/lstm_fused_cell/w_o_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-1/lstm_fused_cell/w_o_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-1/lstm_fused_cell/w_o_diag/Initializer/random_uniform/max (Const) \n", " 
context_repr/lstm-1/lstm_fused_cell/w_o_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-1/lstm_fused_cell/w_o_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-1/lstm_fused_cell/w_o_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-1/lstm_fused_cell/w_o_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-1/lstm_fused_cell/w_o_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell/w_o_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell/w_o_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_o_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_o_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_o_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_o_diag/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_o_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_o_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_o_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell/w_o_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-1/lstm_fused_cell/w_o_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_26 (Assign) /device:GPU:0\n", " save/Assign_95 (Assign) /device:GPU:0\n", " save/Assign_96 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.008352: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-1/lstm_fused_cell_1/kernel/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-1/lstm_fused_cell_1/kernel/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-1/lstm_fused_cell_1/kernel/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-1/lstm_fused_cell_1/kernel/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-1/lstm_fused_cell_1/kernel/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-1/lstm_fused_cell_1/kernel/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-1/lstm_fused_cell_1/kernel/Initializer/random_uniform (Add) \n", " context_repr/lstm-1/lstm_fused_cell_1/kernel (VariableV2) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell_1/kernel/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell_1/kernel/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/kernel/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/kernel/Adam/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/kernel/Adam/Initializer/zeros (Fill) /device:GPU:0\n", " 
training/context_repr/lstm-1/lstm_fused_cell_1/kernel/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/kernel/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/kernel/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/kernel/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/kernel/Adam_1/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/kernel/Adam_1/Initializer/zeros (Fill) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/kernel/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/kernel/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/kernel/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-1/lstm_fused_cell_1/kernel/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_28 (Assign) /device:GPU:0\n", " save/Assign_99 (Assign) /device:GPU:0\n", " save/Assign_100 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.008494: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Identity: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-1/lstm_fused_cell_1/bias/Initializer/Const (Const) \n", " context_repr/lstm-1/lstm_fused_cell_1/bias (VariableV2) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell_1/bias/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell_1/bias/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/bias/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/bias/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/bias/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/bias/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/bias/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/bias/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/bias/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/bias/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-1/lstm_fused_cell_1/bias/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_27 (Assign) /device:GPU:0\n", " save/Assign_97 (Assign) /device:GPU:0\n", " save/Assign_98 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.008637: W 
external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_i_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Adam/Initializer/zeros (Const) 
/device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-1/lstm_fused_cell_1/w_i_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_30 (Assign) /device:GPU:0\n", " save/Assign_103 (Assign) /device:GPU:0\n", " save/Assign_104 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.008797: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_f_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Adam/read (Identity) 
/device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-1/lstm_fused_cell_1/w_f_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_29 (Assign) /device:GPU:0\n", " save/Assign_101 (Assign) /device:GPU:0\n", " save/Assign_102 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.008975: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/max (Const) \n", " 
context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-1/lstm_fused_cell_1/w_o_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-1/lstm_fused_cell_1/w_o_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_31 (Assign) /device:GPU:0\n", " save/Assign_105 (Assign) /device:GPU:0\n", " save/Assign_106 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.009178: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-2/lstm_fused_cell/kernel/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-2/lstm_fused_cell/kernel/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-2/lstm_fused_cell/kernel/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-2/lstm_fused_cell/kernel/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-2/lstm_fused_cell/kernel/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-2/lstm_fused_cell/kernel/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-2/lstm_fused_cell/kernel/Initializer/random_uniform (Add) \n", " context_repr/lstm-2/lstm_fused_cell/kernel (VariableV2) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell/kernel/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell/kernel/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/kernel/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/kernel/Adam/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/kernel/Adam/Initializer/zeros (Fill) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/kernel/Adam (VariableV2) 
/device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/kernel/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/kernel/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/kernel/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/kernel/Adam_1/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/kernel/Adam_1/Initializer/zeros (Fill) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/kernel/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/kernel/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/kernel/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-2/lstm_fused_cell/kernel/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_33 (Assign) /device:GPU:0\n", " save/Assign_109 (Assign) /device:GPU:0\n", " save/Assign_110 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.009317: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Identity: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-2/lstm_fused_cell/bias/Initializer/Const (Const) \n", " context_repr/lstm-2/lstm_fused_cell/bias (VariableV2) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell/bias/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell/bias/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/bias/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/bias/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/bias/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/bias/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/bias/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/bias/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/bias/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/bias/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-2/lstm_fused_cell/bias/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_32 (Assign) /device:GPU:0\n", " save/Assign_107 (Assign) /device:GPU:0\n", " save/Assign_108 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.009465: W 
external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-2/lstm_fused_cell/w_i_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-2/lstm_fused_cell/w_i_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-2/lstm_fused_cell/w_i_diag/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-2/lstm_fused_cell/w_i_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-2/lstm_fused_cell/w_i_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-2/lstm_fused_cell/w_i_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-2/lstm_fused_cell/w_i_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-2/lstm_fused_cell/w_i_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell/w_i_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell/w_i_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_i_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " 
training/context_repr/lstm-2/lstm_fused_cell/w_i_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_i_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_i_diag/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_i_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_i_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_i_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_i_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-2/lstm_fused_cell/w_i_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_35 (Assign) /device:GPU:0\n", " save/Assign_113 (Assign) /device:GPU:0\n", " save/Assign_114 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.009614: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-2/lstm_fused_cell/w_f_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-2/lstm_fused_cell/w_f_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-2/lstm_fused_cell/w_f_diag/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-2/lstm_fused_cell/w_f_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-2/lstm_fused_cell/w_f_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-2/lstm_fused_cell/w_f_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-2/lstm_fused_cell/w_f_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-2/lstm_fused_cell/w_f_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell/w_f_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell/w_f_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_f_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_f_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_f_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_f_diag/Adam/read (Identity) /device:GPU:0\n", " 
training/context_repr/lstm-2/lstm_fused_cell/w_f_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_f_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_f_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_f_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-2/lstm_fused_cell/w_f_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_34 (Assign) /device:GPU:0\n", " save/Assign_111 (Assign) /device:GPU:0\n", " save/Assign_112 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.009763: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-2/lstm_fused_cell/w_o_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-2/lstm_fused_cell/w_o_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-2/lstm_fused_cell/w_o_diag/Initializer/random_uniform/max (Const) \n", " 
context_repr/lstm-2/lstm_fused_cell/w_o_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-2/lstm_fused_cell/w_o_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-2/lstm_fused_cell/w_o_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-2/lstm_fused_cell/w_o_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-2/lstm_fused_cell/w_o_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell/w_o_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell/w_o_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_o_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_o_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_o_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_o_diag/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_o_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_o_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_o_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell/w_o_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-2/lstm_fused_cell/w_o_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_36 (Assign) /device:GPU:0\n", " save/Assign_115 (Assign) /device:GPU:0\n", " save/Assign_116 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.009960: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-2/lstm_fused_cell_1/kernel/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-2/lstm_fused_cell_1/kernel/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-2/lstm_fused_cell_1/kernel/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-2/lstm_fused_cell_1/kernel/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-2/lstm_fused_cell_1/kernel/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-2/lstm_fused_cell_1/kernel/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-2/lstm_fused_cell_1/kernel/Initializer/random_uniform (Add) \n", " context_repr/lstm-2/lstm_fused_cell_1/kernel (VariableV2) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell_1/kernel/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell_1/kernel/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/kernel/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/kernel/Adam/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/kernel/Adam/Initializer/zeros (Fill) /device:GPU:0\n", " 
training/context_repr/lstm-2/lstm_fused_cell_1/kernel/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/kernel/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/kernel/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/kernel/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/kernel/Adam_1/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/kernel/Adam_1/Initializer/zeros (Fill) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/kernel/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/kernel/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/kernel/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-2/lstm_fused_cell_1/kernel/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_38 (Assign) /device:GPU:0\n", " save/Assign_119 (Assign) /device:GPU:0\n", " save/Assign_120 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.010120: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Identity: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-2/lstm_fused_cell_1/bias/Initializer/Const (Const) \n", " context_repr/lstm-2/lstm_fused_cell_1/bias (VariableV2) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell_1/bias/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell_1/bias/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/bias/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/bias/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/bias/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/bias/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/bias/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/bias/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/bias/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/bias/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-2/lstm_fused_cell_1/bias/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_37 (Assign) /device:GPU:0\n", " save/Assign_117 (Assign) /device:GPU:0\n", " save/Assign_118 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.010274: W 
external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_i_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Adam/Initializer/zeros (Const) 
/device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-2/lstm_fused_cell_1/w_i_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_40 (Assign) /device:GPU:0\n", " save/Assign_123 (Assign) /device:GPU:0\n", " save/Assign_124 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.010438: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/max (Const) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_f_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Adam/read (Identity) 
/device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-2/lstm_fused_cell_1/w_f_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_39 (Assign) /device:GPU:0\n", " save/Assign_121 (Assign) /device:GPU:0\n", " save/Assign_122 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.010592: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/shape (Const) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/min (Const) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/max (Const) \n", " 
context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/sub (Sub) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform/mul (Mul) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Initializer/random_uniform (Add) \n", " context_repr/lstm-2/lstm_fused_cell_1/w_o_diag (VariableV2) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Assign (Assign) /device:GPU:0\n", " context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/lstm-2/lstm_fused_cell_1/w_o_diag/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_41 (Assign) /device:GPU:0\n", " save/Assign_125 (Assign) /device:GPU:0\n", " save/Assign_126 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.010801: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Fill: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/W/Initializer/random_uniform/shape (Const) \n", " context_repr/W/Initializer/random_uniform/min (Const) \n", " context_repr/W/Initializer/random_uniform/max (Const) \n", " context_repr/W/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/W/Initializer/random_uniform/sub (Sub) \n", " context_repr/W/Initializer/random_uniform/mul (Mul) \n", " context_repr/W/Initializer/random_uniform (Add) \n", " context_repr/W (VariableV2) /device:GPU:0\n", " context_repr/W/Assign (Assign) /device:GPU:0\n", " context_repr/W/read (Identity) /device:GPU:0\n", " training/context_repr/W/Adam/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/W/Adam/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/W/Adam/Initializer/zeros (Fill) /device:GPU:0\n", " training/context_repr/W/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/W/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/W/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/W/Adam_1/Initializer/zeros/shape_as_tensor (Const) /device:GPU:0\n", " training/context_repr/W/Adam_1/Initializer/zeros/Const (Const) /device:GPU:0\n", " training/context_repr/W/Adam_1/Initializer/zeros (Fill) /device:GPU:0\n", " 
training/context_repr/W/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/W/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/W/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/W/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_10 (Assign) /device:GPU:0\n", " save/Assign_63 (Assign) /device:GPU:0\n", " save/Assign_64 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.010944: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:0' assigned_device_name_='' resource_device_name_='/device:GPU:0' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " context_repr/b/Initializer/random_uniform/shape (Const) \n", " context_repr/b/Initializer/random_uniform/min (Const) \n", " context_repr/b/Initializer/random_uniform/max (Const) \n", " context_repr/b/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " context_repr/b/Initializer/random_uniform/sub (Sub) \n", " context_repr/b/Initializer/random_uniform/mul (Mul) \n", " context_repr/b/Initializer/random_uniform (Add) \n", " context_repr/b (VariableV2) /device:GPU:0\n", " context_repr/b/Assign (Assign) /device:GPU:0\n", " context_repr/b/read 
(Identity) /device:GPU:0\n", " training/context_repr/b/Adam/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/b/Adam (VariableV2) /device:GPU:0\n", " training/context_repr/b/Adam/Assign (Assign) /device:GPU:0\n", " training/context_repr/b/Adam/read (Identity) /device:GPU:0\n", " training/context_repr/b/Adam_1/Initializer/zeros (Const) /device:GPU:0\n", " training/context_repr/b/Adam_1 (VariableV2) /device:GPU:0\n", " training/context_repr/b/Adam_1/Assign (Assign) /device:GPU:0\n", " training/context_repr/b/Adam_1/read (Identity) /device:GPU:0\n", " training_1/Adam/update_context_repr/b/ApplyAdam (ApplyAdam) /device:GPU:0\n", " save/Assign_11 (Assign) /device:GPU:0\n", " save/Assign_65 (Assign) /device:GPU:0\n", " save/Assign_66 (Assign) /device:GPU:0\n", "\n", "2024-03-27 18:34:05.011099: W external/org_tensorflow/tensorflow/core/common_runtime/colocation_graph.cc:1218] Failed to place the graph without changing the devices of some resources. Some of the operations (that had to be colocated with resource generating operations) are not supported on the resources' devices. 
Current candidate devices are [\n", " /job:localhost/replica:0/task:0/device:CPU:0].\n", "See below for details of this colocation group:\n", "Colocation Debug Info:\n", "Colocation group had the following types and supported devices: \n", "Root Member(assigned_device_name_index_=-1 requested_device_name_='/device:GPU:2' assigned_device_name_='' resource_device_name_='/device:GPU:2' supported_device_types_=[CPU] possible_devices_=[]\n", "ApplyAdam: CPU \n", "Switch: CPU \n", "Assign: CPU \n", "VariableV2: CPU \n", "Identity: CPU \n", "Add: CPU \n", "Mul: CPU \n", "Sub: CPU \n", "RandomUniform: CPU \n", "Const: CPU \n", "\n", "Colocation members, user-requested devices, and framework assigned devices, if any:\n", " inference/transition_params/Initializer/random_uniform/shape (Const) \n", " inference/transition_params/Initializer/random_uniform/min (Const) \n", " inference/transition_params/Initializer/random_uniform/max (Const) \n", " inference/transition_params/Initializer/random_uniform/RandomUniform (RandomUniform) \n", " inference/transition_params/Initializer/random_uniform/sub (Sub) \n", " inference/transition_params/Initializer/random_uniform/mul (Mul) \n", " inference/transition_params/Initializer/random_uniform (Add) \n", " inference/transition_params (VariableV2) /device:GPU:2\n", " inference/transition_params/Assign (Assign) /device:GPU:2\n", " inference/transition_params/read (Identity) /device:GPU:2\n", " inference/cond/Reshape_4/Switch (Switch) /device:GPU:2\n", " inference/cond_1/ExpandDims/Switch (Switch) /device:GPU:2\n", " inference/cond_2/ExpandDims_1/Switch (Switch) /device:GPU:2\n", " training/inference/transition_params/Adam/Initializer/zeros (Const) /device:GPU:2\n", " training/inference/transition_params/Adam (VariableV2) /device:GPU:2\n", " training/inference/transition_params/Adam/Assign (Assign) /device:GPU:2\n", " training/inference/transition_params/Adam/read (Identity) /device:GPU:2\n", " 
training/inference/transition_params/Adam_1/Initializer/zeros (Const) /device:GPU:2\n", " training/inference/transition_params/Adam_1 (VariableV2) /device:GPU:2\n", " training/inference/transition_params/Adam_1/Assign (Assign) /device:GPU:2\n", " training/inference/transition_params/Adam_1/read (Identity) /device:GPU:2\n", " training_1/Adam/update_inference/transition_params/ApplyAdam (ApplyAdam) /device:GPU:2\n", " save/Assign_42 (Assign) /device:GPU:2\n", " save/Assign_127 (Assign) /device:GPU:2\n", " save/Assign_128 (Assign) /device:GPU:2\n", "\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "[OK!]\n" ] } ], "source": [ "# spark is the Spark Session automatically started by pyspark.\n", "data = spark.createDataFrame(phrases).toDF(\"text\")\n", "\n", "# Download the pretrained pipeline from Johnsnowlab's servers\n", "entity_recognizer_pipeline = PretrainedPipeline(\"entity_recognizer_md\", lang=\"fr\")" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [], "source": [ "ner = entity_recognizer_pipeline.annotate(phrases[0])" ] }, { "cell_type": "code", "execution_count": 12, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "['Guerre en Ukraine', 'Macron', 'France 2', 'TF1']" ] }, "execution_count": 12, "metadata": {}, "output_type": "execute_result" } ], "source": [ "ner[0][\"entities\"]" ] }, { "cell_type": "code", "execution_count": 23, "metadata": {}, "outputs": [], "source": [ "ner_list = entity_recognizer_pipeline.annotate([p for p_list in phrases[:200] for p in p_list])" ] }, { "cell_type": "code", "execution_count": 24, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[{'entities': ['Guerre en Ukraine', 'Macron', 'France 2', 'TF1'],\n", " 'document': [\"Guerre en Ukraine : ce qu'il faut retenir de l'interview d'Emmanuel Macron sur France 2 et TF1\"],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'ce',\n", " \"qu'il\",\n", " 'faut',\n", " 'retenir',\n", " 'de',\n", " 
\"l'interview\",\n", " \"d'Emmanuel\",\n", " 'Macron',\n", " 'sur',\n", " 'France',\n", " '2',\n", " 'et',\n", " 'TF1'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'I-ORG'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'ce',\n", " \"qu'il\",\n", " 'faut',\n", " 'retenir',\n", " 'de',\n", " \"l'interview\",\n", " \"d'Emmanuel\",\n", " 'Macron',\n", " 'sur',\n", " 'France',\n", " '2',\n", " 'et',\n", " 'TF1'],\n", " 'sentence': [\"Guerre en Ukraine : ce qu'il faut retenir de l'interview d'Emmanuel Macron sur France 2 et TF1\"]},\n", " {'entities': ['Médias', 'Vincent Bolloré', 'Cyril Hanouna', 'Pascal Praud'],\n", " 'document': [\"Médias : Vincent Bolloré, Cyril Hanouna, Pascal Praud... Ce qu'il faut retenir de leurs déclarations devant la commission d'enquête de l'Assemblée nationale\"],\n", " 'token': ['Médias',\n", " ':',\n", " 'Vincent',\n", " 'Bolloré',\n", " ',',\n", " 'Cyril',\n", " 'Hanouna',\n", " ',',\n", " 'Pascal',\n", " 'Praud',\n", " '...',\n", " 'Ce',\n", " \"qu'il\",\n", " 'faut',\n", " 'retenir',\n", " 'de',\n", " 'leurs',\n", " 'déclarations',\n", " 'devant',\n", " 'la',\n", " 'commission',\n", " \"d'enquête\",\n", " 'de',\n", " \"l'Assemblée\",\n", " 'nationale'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Médias',\n", " ':',\n", " 'Vincent',\n", " 'Bolloré',\n", " ',',\n", " 'Cyril',\n", " 'Hanouna',\n", " ',',\n", " 'Pascal',\n", " 'Praud',\n", " '...',\n", " 'Ce',\n", " \"qu'il\",\n", " 'faut',\n", " 'retenir',\n", " 'de',\n", " 'leurs',\n", " 
'déclarations',\n", " 'devant',\n", " 'la',\n", " 'commission',\n", " \"d'enquête\",\n", " 'de',\n", " \"l'Assemblée\",\n", " 'nationale'],\n", " 'sentence': ['Médias : Vincent Bolloré, Cyril Hanouna, Pascal Praud...',\n", " \"Ce qu'il faut retenir de leurs déclarations devant la commission d'enquête de l'Assemblée nationale\"]},\n", " {'entities': ['Vladimir Poutine'],\n", " 'document': [\"Russie : cinq choses à savoir sur l'élection présidentielle, que Vladimir Poutine est assuré de remporter\"],\n", " 'token': ['Russie',\n", " ':',\n", " 'cinq',\n", " 'choses',\n", " 'à',\n", " 'savoir',\n", " 'sur',\n", " \"l'élection\",\n", " 'présidentielle',\n", " ',',\n", " 'que',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'est',\n", " 'assuré',\n", " 'de',\n", " 'remporter'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Russie',\n", " ':',\n", " 'cinq',\n", " 'choses',\n", " 'à',\n", " 'savoir',\n", " 'sur',\n", " \"l'élection\",\n", " 'présidentielle',\n", " ',',\n", " 'que',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'est',\n", " 'assuré',\n", " 'de',\n", " 'remporter'],\n", " 'sentence': [\"Russie : cinq choses à savoir sur l'élection présidentielle, que Vladimir Poutine est assuré de remporter\"]},\n", " {'entities': ['ministre des Sports', 'Amélie Oudéa-Castéra', 'Noël Le Graët'],\n", " 'document': [\"La ministre des Sports, Amélie Oudéa-Castéra, a été mise en examen pour diffamation envers l'ex-patron du foot français Noël Le Graët\"],\n", " 'token': ['La',\n", " 'ministre',\n", " 'des',\n", " 'Sports',\n", " ',',\n", " 'Amélie',\n", " 'Oudéa-Castéra',\n", " ',',\n", " 'a',\n", " 'été',\n", " 'mise',\n", " 'en',\n", " 'examen',\n", " 'pour',\n", " 'diffamation',\n", " 'envers',\n", " \"l'ex-patron\",\n", " 'du',\n", " 'foot',\n", " 'français',\n", " 'Noël',\n", " 'Le',\n", " 'Graët'],\n", " 
'ner': ['O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['La',\n", " 'ministre',\n", " 'des',\n", " 'Sports',\n", " ',',\n", " 'Amélie',\n", " 'Oudéa-Castéra',\n", " ',',\n", " 'a',\n", " 'été',\n", " 'mise',\n", " 'en',\n", " 'examen',\n", " 'pour',\n", " 'diffamation',\n", " 'envers',\n", " \"l'ex-patron\",\n", " 'du',\n", " 'foot',\n", " 'français',\n", " 'Noël',\n", " 'Le',\n", " 'Graët'],\n", " 'sentence': [\"La ministre des Sports, Amélie Oudéa-Castéra, a été mise en examen pour diffamation envers l'ex-patron du foot français Noël Le Graët\"]},\n", " {'entities': ['Impôts'],\n", " 'document': ['Impôts : voici les dates limites pour remplir sa déclaration de revenus en ligne en 2024'],\n", " 'token': ['Impôts',\n", " ':',\n", " 'voici',\n", " 'les',\n", " 'dates',\n", " 'limites',\n", " 'pour',\n", " 'remplir',\n", " 'sa',\n", " 'déclaration',\n", " 'de',\n", " 'revenus',\n", " 'en',\n", " 'ligne',\n", " 'en',\n", " '2024'],\n", " 'ner': ['I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Impôts',\n", " ':',\n", " 'voici',\n", " 'les',\n", " 'dates',\n", " 'limites',\n", " 'pour',\n", " 'remplir',\n", " 'sa',\n", " 'déclaration',\n", " 'de',\n", " 'revenus',\n", " 'en',\n", " 'ligne',\n", " 'en',\n", " '2024'],\n", " 'sentence': ['Impôts : voici les dates limites pour remplir sa déclaration de revenus en ligne en 2024']},\n", " {'entities': ['Deepfakes'],\n", " 'document': ['\"Deepfakes\" pornographiques : pourquoi la lutte contre les images générées par intelligence artificielle s\\'annonce difficile'],\n", " 'token': ['\"',\n", " 'Deepfakes',\n", " '\"',\n", " 
'pornographiques',\n", " ':',\n", " 'pourquoi',\n", " 'la',\n", " 'lutte',\n", " 'contre',\n", " 'les',\n", " 'images',\n", " 'générées',\n", " 'par',\n", " 'intelligence',\n", " 'artificielle',\n", " \"s'annonce\",\n", " 'difficile'],\n", " 'ner': ['O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['\"',\n", " 'Deepfakes',\n", " '\"',\n", " 'pornographiques',\n", " ':',\n", " 'pourquoi',\n", " 'la',\n", " 'lutte',\n", " 'contre',\n", " 'les',\n", " 'images',\n", " 'générées',\n", " 'par',\n", " 'intelligence',\n", " 'artificielle',\n", " \"s'annonce\",\n", " 'difficile'],\n", " 'sentence': ['\"Deepfakes\" pornographiques : pourquoi la lutte contre les images générées par intelligence artificielle s\\'annonce difficile']},\n", " {'entities': ['Sciences Po Paris', 'Sciences Po', 'Palestine'],\n", " 'document': ['Sciences Po Paris : \"Il n\\'y a pas eu de propos antisémites\", assure une membre du comité Sciences Po pour la Palestine'],\n", " 'token': ['Sciences',\n", " 'Po',\n", " 'Paris',\n", " ':',\n", " '\"',\n", " 'Il',\n", " \"n'y\",\n", " 'a',\n", " 'pas',\n", " 'eu',\n", " 'de',\n", " 'propos',\n", " 'antisémites',\n", " '\",',\n", " 'assure',\n", " 'une',\n", " 'membre',\n", " 'du',\n", " 'comité',\n", " 'Sciences',\n", " 'Po',\n", " 'pour',\n", " 'la',\n", " 'Palestine'],\n", " 'ner': ['I-ORG',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['Sciences',\n", " 'Po',\n", " 'Paris',\n", " ':',\n", " '\"',\n", " 'Il',\n", " \"n'y\",\n", " 'a',\n", " 'pas',\n", " 'eu',\n", " 'de',\n", " 'propos',\n", " 'antisémites',\n", " '\",',\n", " 'assure',\n", " 'une',\n", " 
'membre',\n", " 'du',\n", " 'comité',\n", " 'Sciences',\n", " 'Po',\n", " 'pour',\n", " 'la',\n", " 'Palestine'],\n", " 'sentence': ['Sciences Po Paris : \"Il n\\'y a pas eu de propos antisémites\", assure une membre du comité Sciences Po pour la Palestine']},\n", " {'entities': ['Ukrainiens'],\n", " 'document': ['\"Ils vont chez les gens avec des hommes armés\"\\xa0: dans les zones occupées, les Ukrainiens sont \"invités\" à voter pour la présidentielle russe'],\n", " 'token': ['\"',\n", " 'Ils',\n", " 'vont',\n", " 'chez',\n", " 'les',\n", " 'gens',\n", " 'avec',\n", " 'des',\n", " 'hommes',\n", " 'armés\"\\xa0',\n", " ':',\n", " 'dans',\n", " 'les',\n", " 'zones',\n", " 'occupées',\n", " ',',\n", " 'les',\n", " 'Ukrainiens',\n", " 'sont',\n", " '\"',\n", " 'invités',\n", " '\"',\n", " 'à',\n", " 'voter',\n", " 'pour',\n", " 'la',\n", " 'présidentielle',\n", " 'russe'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['\"',\n", " 'Ils',\n", " 'vont',\n", " 'chez',\n", " 'les',\n", " 'gens',\n", " 'avec',\n", " 'des',\n", " 'hommes',\n", " 'armés\"\\xa0',\n", " ':',\n", " 'dans',\n", " 'les',\n", " 'zones',\n", " 'occupées',\n", " ',',\n", " 'les',\n", " 'Ukrainiens',\n", " 'sont',\n", " '\"',\n", " 'invités',\n", " '\"',\n", " 'à',\n", " 'voter',\n", " 'pour',\n", " 'la',\n", " 'présidentielle',\n", " 'russe'],\n", " 'sentence': ['\"Ils vont chez les gens avec des hommes armés\"\\xa0: dans les zones occupées, les Ukrainiens sont \"invités\" à voter pour la présidentielle russe']},\n", " {'entities': ['Interview', 'Macron'],\n", " 'document': ['Interview d\\'Emmanuel Macron : \"Le président fait peur\" et souffle \"sur les braises d’un potentiel conflit mondial à des fins 
électorales\" selon les oppositions'],\n", " 'token': ['Interview',\n", " \"d'Emmanuel\",\n", " 'Macron',\n", " ':',\n", " '\"',\n", " 'Le',\n", " 'président',\n", " 'fait',\n", " 'peur',\n", " '\"',\n", " 'et',\n", " 'souffle',\n", " '\"',\n", " 'sur',\n", " 'les',\n", " 'braises',\n", " 'd’un',\n", " 'potentiel',\n", " 'conflit',\n", " 'mondial',\n", " 'à',\n", " 'des',\n", " 'fins',\n", " 'électorales',\n", " '\"',\n", " 'selon',\n", " 'les',\n", " 'oppositions'],\n", " 'ner': ['I-MISC',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Interview',\n", " \"d'Emmanuel\",\n", " 'Macron',\n", " ':',\n", " '\"',\n", " 'Le',\n", " 'président',\n", " 'fait',\n", " 'peur',\n", " '\"',\n", " 'et',\n", " 'souffle',\n", " '\"',\n", " 'sur',\n", " 'les',\n", " 'braises',\n", " 'd’un',\n", " 'potentiel',\n", " 'conflit',\n", " 'mondial',\n", " 'à',\n", " 'des',\n", " 'fins',\n", " 'électorales',\n", " '\"',\n", " 'selon',\n", " 'les',\n", " 'oppositions'],\n", " 'sentence': ['Interview d\\'Emmanuel Macron : \"Le président fait peur\" et souffle \"sur les braises d’un potentiel conflit mondial à des fins électorales\" selon les oppositions']},\n", " {'entities': ['Sciences Po', 'Chloé Morin'],\n", " 'document': [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"],\n", " 'token': ['«Il',\n", " 'y',\n", " 'a',\n", " 'un',\n", " 'problème',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " ':',\n", " 'la',\n", " 'moitié',\n", " 'des',\n", " 'étudiants',\n", " 'sont',\n", " 'étrangers»',\n", " ',',\n", " 'affirme',\n", " \"l'essayiste\",\n", " 'Chloé',\n", " 'Morin'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['«Il',\n", " 'y',\n", " 'a',\n", " 'un',\n", " 'problème',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " ':',\n", " 'la',\n", " 'moitié',\n", " 'des',\n", " 'étudiants',\n", " 'sont',\n", " 'étrangers»',\n", " ',',\n", " 'affirme',\n", " \"l'essayiste\",\n", " 'Chloé',\n", " 'Morin'],\n", " 'sentence': [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"]},\n", " {'entities': ['Sciences Po', 'Chloé Morin'],\n", " 'document': [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"],\n", " 'token': ['«Il',\n", " 'y',\n", " 'a',\n", " 'un',\n", " 'problème',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " ':',\n", " 'la',\n", " 'moitié',\n", " 'des',\n", " 'étudiants',\n", " 'sont',\n", " 'étrangers»',\n", " ',',\n", " 'affirme',\n", " \"l'essayiste\",\n", " 'Chloé',\n", " 'Morin'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['«Il',\n", " 'y',\n", " 'a',\n", " 'un',\n", " 'problème',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " ':',\n", " 'la',\n", " 'moitié',\n", " 'des',\n", " 'étudiants',\n", " 'sont',\n", " 'étrangers»',\n", " ',',\n", " 'affirme',\n", " \"l'essayiste\",\n", " 'Chloé',\n", " 'Morin'],\n", " 'sentence': [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"]},\n", " {'entities': ['Pascal Praud',\n", " '«Honte',\n", " 'Mathias Vicherat',\n", " 'Sylvie Retailleau»'],\n", " 'document': ['Pascal Praud : «Honte à Mathias Vicherat et Sylvie Retailleau»'],\n", " 'token': 
['Pascal',\n", " 'Praud',\n", " ':',\n", " '«Honte',\n", " 'à',\n", " 'Mathias',\n", " 'Vicherat',\n", " 'et',\n", " 'Sylvie',\n", " 'Retailleau»'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['Pascal',\n", " 'Praud',\n", " ':',\n", " '«Honte',\n", " 'à',\n", " 'Mathias',\n", " 'Vicherat',\n", " 'et',\n", " 'Sylvie',\n", " 'Retailleau»'],\n", " 'sentence': ['Pascal Praud : «Honte à Mathias Vicherat et Sylvie Retailleau»']},\n", " {'entities': ['UFC', 'Benoît Saint-Denis', 'Dustin Poirier'],\n", " 'document': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " 'token': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'ner': ['I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'sentence': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier']},\n", " {'entities': ['JO'],\n", " 'document': [\"JO 2024 : les habitants de HLM s'exposeront à une expulsion et 9.000 euros d’amende en cas de sous-location\"],\n", " 'token': ['JO',\n", " '2024',\n", " ':',\n", " 'les',\n", " 'habitants',\n", " 'de',\n", " 'HLM',\n", " \"s'exposeront\",\n", " 'à',\n", " 'une',\n", " 'expulsion',\n", " 'et',\n", " '9.000',\n", " 'euros',\n", " 'd’amende',\n", " 'en',\n", " 'cas',\n", " 'de',\n", " 'sous-location'],\n", " 'ner': 
['I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['JO',\n", " '2024',\n", " ':',\n", " 'les',\n", " 'habitants',\n", " 'de',\n", " 'HLM',\n", " \"s'exposeront\",\n", " 'à',\n", " 'une',\n", " 'expulsion',\n", " 'et',\n", " '9.000',\n", " 'euros',\n", " 'd’amende',\n", " 'en',\n", " 'cas',\n", " 'de',\n", " 'sous-location'],\n", " 'sentence': [\"JO 2024 : les habitants de HLM s'exposeront à une expulsion et 9.000 euros d’amende en cas de sous-location\"]},\n", " {'entities': ['EuroMillions'],\n", " 'document': ['EuroMillions : voici les 13 numéros et 4 étoiles qui sortent le plus souvent'],\n", " 'token': ['EuroMillions',\n", " ':',\n", " 'voici',\n", " 'les',\n", " '13',\n", " 'numéros',\n", " 'et',\n", " '4',\n", " 'étoiles',\n", " 'qui',\n", " 'sortent',\n", " 'le',\n", " 'plus',\n", " 'souvent'],\n", " 'ner': ['I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['EuroMillions',\n", " ':',\n", " 'voici',\n", " 'les',\n", " '13',\n", " 'numéros',\n", " 'et',\n", " '4',\n", " 'étoiles',\n", " 'qui',\n", " 'sortent',\n", " 'le',\n", " 'plus',\n", " 'souvent'],\n", " 'sentence': ['EuroMillions : voici les 13 numéros et 4 étoiles qui sortent le plus souvent']},\n", " {'entities': ['Sciences Po', 'Chloé Morin'],\n", " 'document': [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"],\n", " 'token': ['«Il',\n", " 'y',\n", " 'a',\n", " 'un',\n", " 'problème',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " ':',\n", " 'la',\n", " 'moitié',\n", " 'des',\n", " 'étudiants',\n", " 'sont',\n", " 'étrangers»',\n", " ',',\n", " 'affirme',\n", " \"l'essayiste\",\n", " 'Chloé',\n", " 'Morin'],\n", " 'ner': ['O',\n", " 
'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['«Il',\n", " 'y',\n", " 'a',\n", " 'un',\n", " 'problème',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " ':',\n", " 'la',\n", " 'moitié',\n", " 'des',\n", " 'étudiants',\n", " 'sont',\n", " 'étrangers»',\n", " ',',\n", " 'affirme',\n", " \"l'essayiste\",\n", " 'Chloé',\n", " 'Morin'],\n", " 'sentence': [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"]},\n", " {'entities': ['Pascal Praud',\n", " '«Honte',\n", " 'Mathias Vicherat',\n", " 'Sylvie Retailleau»'],\n", " 'document': ['Pascal Praud : «Honte à Mathias Vicherat et Sylvie Retailleau»'],\n", " 'token': ['Pascal',\n", " 'Praud',\n", " ':',\n", " '«Honte',\n", " 'à',\n", " 'Mathias',\n", " 'Vicherat',\n", " 'et',\n", " 'Sylvie',\n", " 'Retailleau»'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['Pascal',\n", " 'Praud',\n", " ':',\n", " '«Honte',\n", " 'à',\n", " 'Mathias',\n", " 'Vicherat',\n", " 'et',\n", " 'Sylvie',\n", " 'Retailleau»'],\n", " 'sentence': ['Pascal Praud : «Honte à Mathias Vicherat et Sylvie Retailleau»']},\n", " {'entities': ['en\\xa0Ukraine\\xa0', 'Russie', 'l’Ukraine'],\n", " 'document': ['En direct, guerre en\\xa0Ukraine\\xa0: la Russie et l’Ukraine disent avoir déjoué plusieurs attaques de drones dans la nuit'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'la',\n", " 'Russie',\n", " 'et',\n", " 'l’Ukraine',\n", " 'disent',\n", " 'avoir',\n", " 'déjoué',\n", " 'plusieurs',\n", " 'attaques',\n", " 'de',\n", " 'drones',\n", " 'dans',\n", " 'la',\n", " 'nuit'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", 
" 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'la',\n", " 'Russie',\n", " 'et',\n", " 'l’Ukraine',\n", " 'disent',\n", " 'avoir',\n", " 'déjoué',\n", " 'plusieurs',\n", " 'attaques',\n", " 'de',\n", " 'drones',\n", " 'dans',\n", " 'la',\n", " 'nuit'],\n", " 'sentence': ['En direct, guerre en\\xa0Ukraine\\xa0: la Russie et l’Ukraine disent avoir déjoué plusieurs attaques de drones dans la nuit']},\n", " {'entities': ['Russie\\xa0', 'Vladimir Poutine', 'Ukraine'],\n", " 'document': ['En direct, présidentielle en Russie\\xa0: Vladimir Poutine en route vers une réélection, après deux ans de guerre en Ukraine'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'présidentielle',\n", " 'en',\n", " 'Russie\\xa0',\n", " ':',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'en',\n", " 'route',\n", " 'vers',\n", " 'une',\n", " 'réélection',\n", " ',',\n", " 'après',\n", " 'deux',\n", " 'ans',\n", " 'de',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'présidentielle',\n", " 'en',\n", " 'Russie\\xa0',\n", " ':',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'en',\n", " 'route',\n", " 'vers',\n", " 'une',\n", " 'réélection',\n", " ',',\n", " 'après',\n", " 'deux',\n", " 'ans',\n", " 'de',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine'],\n", " 'sentence': ['En direct, présidentielle en Russie\\xa0: Vladimir Poutine en route vers une réélection, après deux ans de guerre en Ukraine']},\n", " {'entities': 
['UFC', 'Benoît Saint-Denis', 'Dustin Poirier'],\n", " 'document': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " 'token': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'ner': ['I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'sentence': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier']},\n", " {'entities': ['en\\xa0Ukraine\\xa0', 'Belgorod', 'Koursk', 'Russie'],\n", " 'document': ['En direct, guerre en\\xa0Ukraine\\xa0: nouvelles attaques de drones ukrainiens contre les régions de Belgorod et de Koursk, dans le sud de la Russie'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'nouvelles',\n", " 'attaques',\n", " 'de',\n", " 'drones',\n", " 'ukrainiens',\n", " 'contre',\n", " 'les',\n", " 'régions',\n", " 'de',\n", " 'Belgorod',\n", " 'et',\n", " 'de',\n", " 'Koursk',\n", " ',',\n", " 'dans',\n", " 'le',\n", " 'sud',\n", " 'de',\n", " 'la',\n", " 'Russie'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 
'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'nouvelles',\n", " 'attaques',\n", " 'de',\n", " 'drones',\n", " 'ukrainiens',\n", " 'contre',\n", " 'les',\n", " 'régions',\n", " 'de',\n", " 'Belgorod',\n", " 'et',\n", " 'de',\n", " 'Koursk',\n", " ',',\n", " 'dans',\n", " 'le',\n", " 'sud',\n", " 'de',\n", " 'la',\n", " 'Russie'],\n", " 'sentence': ['En direct, guerre en\\xa0Ukraine\\xa0: nouvelles attaques de drones ukrainiens contre les régions de Belgorod et de Koursk, dans le sud de la Russie']},\n", " {'entities': ['Laurent Nuñez'],\n", " 'document': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " 'token': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'sentence': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars']},\n", " {'entities': ['EuroMillions'],\n", " 'document': ['EuroMillions : voici les 13 numéros et 4 étoiles qui sortent le plus souvent'],\n", " 'token': ['EuroMillions',\n", " ':',\n", " 'voici',\n", " 
'les',\n", " '13',\n", " 'numéros',\n", " 'et',\n", " '4',\n", " 'étoiles',\n", " 'qui',\n", " 'sortent',\n", " 'le',\n", " 'plus',\n", " 'souvent'],\n", " 'ner': ['I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['EuroMillions',\n", " ':',\n", " 'voici',\n", " 'les',\n", " '13',\n", " 'numéros',\n", " 'et',\n", " '4',\n", " 'étoiles',\n", " 'qui',\n", " 'sortent',\n", " 'le',\n", " 'plus',\n", " 'souvent'],\n", " 'sentence': ['EuroMillions : voici les 13 numéros et 4 étoiles qui sortent le plus souvent']},\n", " {'entities': ['Jean-François Achilli', 'Franceinfo'],\n", " 'document': ['Le journaliste Jean-François Achilli est suspendu de l’antenne de Franceinfo «\\xa0à titre conservatoire\\xa0»'],\n", " 'token': ['Le',\n", " 'journaliste',\n", " 'Jean-François',\n", " 'Achilli',\n", " 'est',\n", " 'suspendu',\n", " 'de',\n", " 'l’antenne',\n", " 'de',\n", " 'Franceinfo',\n", " '«\\xa0à',\n", " 'titre',\n", " 'conservatoire\\xa0»'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Le',\n", " 'journaliste',\n", " 'Jean-François',\n", " 'Achilli',\n", " 'est',\n", " 'suspendu',\n", " 'de',\n", " 'l’antenne',\n", " 'de',\n", " 'Franceinfo',\n", " '«\\xa0à',\n", " 'titre',\n", " 'conservatoire\\xa0»'],\n", " 'sentence': ['Le journaliste Jean-François Achilli est suspendu de l’antenne de Franceinfo «\\xa0à titre conservatoire\\xa0»']},\n", " {'entities': ['Jean-François Achilli', 'Franceinfo'],\n", " 'document': ['Le journaliste Jean-François Achilli est suspendu de l’antenne de Franceinfo «\\xa0à titre conservatoire\\xa0»'],\n", " 'token': ['Le',\n", " 'journaliste',\n", " 'Jean-François',\n", " 'Achilli',\n", " 'est',\n", " 'suspendu',\n", " 'de',\n", " 'l’antenne',\n", " 'de',\n", " 'Franceinfo',\n", " 
'«\\xa0à',\n", " 'titre',\n", " 'conservatoire\\xa0»'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Le',\n", " 'journaliste',\n", " 'Jean-François',\n", " 'Achilli',\n", " 'est',\n", " 'suspendu',\n", " 'de',\n", " 'l’antenne',\n", " 'de',\n", " 'Franceinfo',\n", " '«\\xa0à',\n", " 'titre',\n", " 'conservatoire\\xa0»'],\n", " 'sentence': ['Le journaliste Jean-François Achilli est suspendu de l’antenne de Franceinfo «\\xa0à titre conservatoire\\xa0»']},\n", " {'entities': ['Jean-Michel Cohen', '»', 'Jordan de Luxe'],\n", " 'document': [\"Jean-Michel Cohen : «Je suis millionnaire... Ceux qui ne sont pas contents : 'bossez autant que moi !'», estime le nutritionniste chez Jordan de Luxe\"],\n", " 'token': ['Jean-Michel',\n", " 'Cohen',\n", " ':',\n", " '«Je',\n", " 'suis',\n", " 'millionnaire',\n", " '...',\n", " 'Ceux',\n", " 'qui',\n", " 'ne',\n", " 'sont',\n", " 'pas',\n", " 'contents',\n", " ':',\n", " \"'\",\n", " 'bossez',\n", " 'autant',\n", " 'que',\n", " 'moi',\n", " \"!'\",\n", " '»',\n", " ',',\n", " 'estime',\n", " 'le',\n", " 'nutritionniste',\n", " 'chez',\n", " 'Jordan',\n", " 'de',\n", " 'Luxe'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['Jean-Michel',\n", " 'Cohen',\n", " ':',\n", " '«Je',\n", " 'suis',\n", " 'millionnaire',\n", " '...',\n", " 'Ceux',\n", " 'qui',\n", " 'ne',\n", " 'sont',\n", " 'pas',\n", " 'contents',\n", " ':',\n", " \"'\",\n", " 'bossez',\n", " 'autant',\n", " 'que',\n", " 'moi',\n", " \"!'\",\n", " '»',\n", " ',',\n", " 'estime',\n", " 'le',\n", " 
'nutritionniste',\n", " 'chez',\n", " 'Jordan',\n", " 'de',\n", " 'Luxe'],\n", " 'sentence': ['Jean-Michel Cohen : «Je suis millionnaire...',\n", " \"Ceux qui ne sont pas contents : 'bossez autant que moi !'», estime le nutritionniste chez Jordan de Luxe\"]},\n", " {'entities': ['Coco', 'Gaza'],\n", " 'document': ['La dessinatrice Coco menacée de mort pour un dessin sur la famine à Gaza'],\n", " 'token': ['La',\n", " 'dessinatrice',\n", " 'Coco',\n", " 'menacée',\n", " 'de',\n", " 'mort',\n", " 'pour',\n", " 'un',\n", " 'dessin',\n", " 'sur',\n", " 'la',\n", " 'famine',\n", " 'à',\n", " 'Gaza'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC'],\n", " 'embeddings': ['La',\n", " 'dessinatrice',\n", " 'Coco',\n", " 'menacée',\n", " 'de',\n", " 'mort',\n", " 'pour',\n", " 'un',\n", " 'dessin',\n", " 'sur',\n", " 'la',\n", " 'famine',\n", " 'à',\n", " 'Gaza'],\n", " 'sentence': ['La dessinatrice Coco menacée de mort pour un dessin sur la famine à Gaza']},\n", " {'entities': ['Saint-Malo'],\n", " 'document': ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)'],\n", " 'token': ['«Marée',\n", " 'du',\n", " 'siècle»',\n", " 'à',\n", " 'Saint-Malo',\n", " ':',\n", " 'les',\n", " 'images',\n", " 'impressionnantes',\n", " 'de',\n", " 'la',\n", " 'montée',\n", " 'des',\n", " 'eaux',\n", " '(',\n", " 'vidéo',\n", " ')'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Marée',\n", " 'du',\n", " 'siècle»',\n", " 'à',\n", " 'Saint-Malo',\n", " ':',\n", " 'les',\n", " 'images',\n", " 'impressionnantes',\n", " 'de',\n", " 'la',\n", " 'montée',\n", " 'des',\n", " 'eaux',\n", " '(',\n", " 'vidéo',\n", " ')'],\n", " 'sentence': ['«Marée du siècle» à 
Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)']},\n", " {'entities': ['UFC', 'Benoît Saint-Denis', 'Dustin Poirier'],\n", " 'document': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " 'token': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'ner': ['I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'sentence': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier']},\n", " {'entities': ['Macron\\xa0', 'Europe', 'Russie', 'Ukraine'],\n", " 'document': ['En direct, interview d’Emmanuel Macron\\xa0: «\\xa0Nous n’aurons plus de sécurité\\xa0» en Europe si la Russie «\\xa0venait à gagner\\xa0» en Ukraine'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'interview',\n", " 'd’Emmanuel',\n", " 'Macron\\xa0',\n", " ':',\n", " '«\\xa0Nous',\n", " 'n’aurons',\n", " 'plus',\n", " 'de',\n", " 'sécurité\\xa0»',\n", " 'en',\n", " 'Europe',\n", " 'si',\n", " 'la',\n", " 'Russie',\n", " '«\\xa0venait',\n", " 'à',\n", " 'gagner\\xa0»',\n", " 'en',\n", " 'Ukraine'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'interview',\n", " 
'd’Emmanuel',\n", " 'Macron\\xa0',\n", " ':',\n", " '«\\xa0Nous',\n", " 'n’aurons',\n", " 'plus',\n", " 'de',\n", " 'sécurité\\xa0»',\n", " 'en',\n", " 'Europe',\n", " 'si',\n", " 'la',\n", " 'Russie',\n", " '«\\xa0venait',\n", " 'à',\n", " 'gagner\\xa0»',\n", " 'en',\n", " 'Ukraine'],\n", " 'sentence': ['En direct, interview d’Emmanuel Macron\\xa0: «\\xa0Nous n’aurons plus de sécurité\\xa0» en Europe si la Russie «\\xa0venait à gagner\\xa0» en Ukraine']},\n", " {'entities': ['UFC', 'Benoît Saint-Denis', 'Dustin Poirier'],\n", " 'document': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " 'token': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'ner': ['I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'sentence': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier']},\n", " {'entities': ['Interview d’Emmanuel Macron\\xa0',\n", " 'Europe',\n", " 'Russie',\n", " 'Ukraine'],\n", " 'document': ['Interview d’Emmanuel Macron\\xa0: «\\xa0Nous n’aurons plus de sécurité\\xa0» en Europe si la Russie «\\xa0venait à gagner\\xa0» en Ukraine'],\n", " 'token': ['Interview',\n", " 'd’Emmanuel',\n", " 'Macron\\xa0',\n", " ':',\n", " '«\\xa0Nous',\n", " 'n’aurons',\n", " 'plus',\n", " 'de',\n", " 'sécurité\\xa0»',\n", " 'en',\n", " 'Europe',\n", " 'si',\n", " 'la',\n", " 'Russie',\n", " '«\\xa0venait',\n", " 'à',\n", " 'gagner\\xa0»',\n", " 
'en',\n", " 'Ukraine'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['Interview',\n", " 'd’Emmanuel',\n", " 'Macron\\xa0',\n", " ':',\n", " '«\\xa0Nous',\n", " 'n’aurons',\n", " 'plus',\n", " 'de',\n", " 'sécurité\\xa0»',\n", " 'en',\n", " 'Europe',\n", " 'si',\n", " 'la',\n", " 'Russie',\n", " '«\\xa0venait',\n", " 'à',\n", " 'gagner\\xa0»',\n", " 'en',\n", " 'Ukraine'],\n", " 'sentence': ['Interview d’Emmanuel Macron\\xa0: «\\xa0Nous n’aurons plus de sécurité\\xa0» en Europe si la Russie «\\xa0venait à gagner\\xa0» en Ukraine']},\n", " {'entities': ['L’étrange', 'Jordan Bardella'],\n", " 'document': ['L’étrange genèse du livre de Jordan Bardella'],\n", " 'token': ['L’étrange', 'genèse', 'du', 'livre', 'de', 'Jordan', 'Bardella'],\n", " 'ner': ['I-PER', 'O', 'O', 'O', 'O', 'I-PER', 'I-PER'],\n", " 'embeddings': ['L’étrange',\n", " 'genèse',\n", " 'du',\n", " 'livre',\n", " 'de',\n", " 'Jordan',\n", " 'Bardella'],\n", " 'sentence': ['L’étrange genèse du livre de Jordan Bardella']},\n", " {'entities': ['«J’ai'],\n", " 'document': ['«J’ai besoin de ma voiture» : plusieurs véhicules désossés dans un parking parisien, les propriétaires atterrés'],\n", " 'token': ['«J’ai',\n", " 'besoin',\n", " 'de',\n", " 'ma',\n", " 'voiture»',\n", " ':',\n", " 'plusieurs',\n", " 'véhicules',\n", " 'désossés',\n", " 'dans',\n", " 'un',\n", " 'parking',\n", " 'parisien',\n", " ',',\n", " 'les',\n", " 'propriétaires',\n", " 'atterrés'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«J’ai',\n", " 'besoin',\n", " 'de',\n", " 'ma',\n", " 'voiture»',\n", " ':',\n", " 'plusieurs',\n", " 
'véhicules',\n", " 'désossés',\n", " 'dans',\n", " 'un',\n", " 'parking',\n", " 'parisien',\n", " ',',\n", " 'les',\n", " 'propriétaires',\n", " 'atterrés'],\n", " 'sentence': ['«J’ai besoin de ma voiture» : plusieurs véhicules désossés dans un parking parisien, les propriétaires atterrés']},\n", " {'entities': ['en\\xa0Ukraine\\xa0', 'Belgorod', 'Koursk'],\n", " 'document': ['En direct, guerre en\\xa0Ukraine\\xa0: des combats signalés entre soldats russes et groupes pro-ukrainiens dans les régions russes de Belgorod et Koursk'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'des',\n", " 'combats',\n", " 'signalés',\n", " 'entre',\n", " 'soldats',\n", " 'russes',\n", " 'et',\n", " 'groupes',\n", " 'pro-ukrainiens',\n", " 'dans',\n", " 'les',\n", " 'régions',\n", " 'russes',\n", " 'de',\n", " 'Belgorod',\n", " 'et',\n", " 'Koursk'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'des',\n", " 'combats',\n", " 'signalés',\n", " 'entre',\n", " 'soldats',\n", " 'russes',\n", " 'et',\n", " 'groupes',\n", " 'pro-ukrainiens',\n", " 'dans',\n", " 'les',\n", " 'régions',\n", " 'russes',\n", " 'de',\n", " 'Belgorod',\n", " 'et',\n", " 'Koursk'],\n", " 'sentence': ['En direct, guerre en\\xa0Ukraine\\xa0: des combats signalés entre soldats russes et groupes pro-ukrainiens dans les régions russes de Belgorod et Koursk']},\n", " {'entities': ['UFC', 'Benoît Saint-Denis', 'Dustin Poirier'],\n", " 'document': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " 'token': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 
'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'ner': ['I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'sentence': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier']},\n", " {'entities': ['Aubervilliers'],\n", " 'document': ['A Aubervilliers, une voiture de police a percuté et tué un jeune homme en scooter après une embardée, selon le parquet'],\n", " 'token': ['A',\n", " 'Aubervilliers',\n", " ',',\n", " 'une',\n", " 'voiture',\n", " 'de',\n", " 'police',\n", " 'a',\n", " 'percuté',\n", " 'et',\n", " 'tué',\n", " 'un',\n", " 'jeune',\n", " 'homme',\n", " 'en',\n", " 'scooter',\n", " 'après',\n", " 'une',\n", " 'embardée',\n", " ',',\n", " 'selon',\n", " 'le',\n", " 'parquet'],\n", " 'ner': ['O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['A',\n", " 'Aubervilliers',\n", " ',',\n", " 'une',\n", " 'voiture',\n", " 'de',\n", " 'police',\n", " 'a',\n", " 'percuté',\n", " 'et',\n", " 'tué',\n", " 'un',\n", " 'jeune',\n", " 'homme',\n", " 'en',\n", " 'scooter',\n", " 'après',\n", " 'une',\n", " 'embardée',\n", " ',',\n", " 'selon',\n", " 'le',\n", " 'parquet'],\n", " 'sentence': ['A Aubervilliers, une voiture de police a percuté et tué un jeune homme en scooter après une embardée, selon le parquet']},\n", " 
{'entities': ['Sciences Po', 'Chloé Morin'],\n", " 'document': [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"],\n", " 'token': ['«Il',\n", " 'y',\n", " 'a',\n", " 'un',\n", " 'problème',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " ':',\n", " 'la',\n", " 'moitié',\n", " 'des',\n", " 'étudiants',\n", " 'sont',\n", " 'étrangers»',\n", " ',',\n", " 'affirme',\n", " \"l'essayiste\",\n", " 'Chloé',\n", " 'Morin'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['«Il',\n", " 'y',\n", " 'a',\n", " 'un',\n", " 'problème',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " ':',\n", " 'la',\n", " 'moitié',\n", " 'des',\n", " 'étudiants',\n", " 'sont',\n", " 'étrangers»',\n", " ',',\n", " 'affirme',\n", " \"l'essayiste\",\n", " 'Chloé',\n", " 'Morin'],\n", " 'sentence': [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"]},\n", " {'entities': ['Aubervilliers'],\n", " 'document': ['A Aubervilliers, une voiture de police a percuté et tué un jeune homme en scooter après une embardée, selon le parquet'],\n", " 'token': ['A',\n", " 'Aubervilliers',\n", " ',',\n", " 'une',\n", " 'voiture',\n", " 'de',\n", " 'police',\n", " 'a',\n", " 'percuté',\n", " 'et',\n", " 'tué',\n", " 'un',\n", " 'jeune',\n", " 'homme',\n", " 'en',\n", " 'scooter',\n", " 'après',\n", " 'une',\n", " 'embardée',\n", " ',',\n", " 'selon',\n", " 'le',\n", " 'parquet'],\n", " 'ner': ['O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['A',\n", " 
'Aubervilliers',\n", " ',',\n", " 'une',\n", " 'voiture',\n", " 'de',\n", " 'police',\n", " 'a',\n", " 'percuté',\n", " 'et',\n", " 'tué',\n", " 'un',\n", " 'jeune',\n", " 'homme',\n", " 'en',\n", " 'scooter',\n", " 'après',\n", " 'une',\n", " 'embardée',\n", " ',',\n", " 'selon',\n", " 'le',\n", " 'parquet'],\n", " 'sentence': ['A Aubervilliers, une voiture de police a percuté et tué un jeune homme en scooter après une embardée, selon le parquet']},\n", " {'entities': ['accords de Munich', 'Emmanuel Macron', 'Ukraine'],\n", " 'document': ['L’argument des accords de Munich, utilisé par Emmanuel Macron dans le contexte de la guerre en Ukraine, a ses limites historiques'],\n", " 'token': ['L’argument',\n", " 'des',\n", " 'accords',\n", " 'de',\n", " 'Munich',\n", " ',',\n", " 'utilisé',\n", " 'par',\n", " 'Emmanuel',\n", " 'Macron',\n", " 'dans',\n", " 'le',\n", " 'contexte',\n", " 'de',\n", " 'la',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine',\n", " ',',\n", " 'a',\n", " 'ses',\n", " 'limites',\n", " 'historiques'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['L’argument',\n", " 'des',\n", " 'accords',\n", " 'de',\n", " 'Munich',\n", " ',',\n", " 'utilisé',\n", " 'par',\n", " 'Emmanuel',\n", " 'Macron',\n", " 'dans',\n", " 'le',\n", " 'contexte',\n", " 'de',\n", " 'la',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine',\n", " ',',\n", " 'a',\n", " 'ses',\n", " 'limites',\n", " 'historiques'],\n", " 'sentence': ['L’argument des accords de Munich, utilisé par Emmanuel Macron dans le contexte de la guerre en Ukraine, a ses limites historiques']},\n", " {'entities': ['Koursk', 'Belgorod'],\n", " 'document': ['En direct, guerre en\\xa0Ukraine\\xa0: les combattants russes pro-ukrainiens disent lancer «\\xa0des frappes 
massives\\xa0» dans les régions de Koursk et de Belgorod'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'les',\n", " 'combattants',\n", " 'russes',\n", " 'pro-ukrainiens',\n", " 'disent',\n", " 'lancer',\n", " '«\\xa0des',\n", " 'frappes',\n", " 'massives\\xa0»',\n", " 'dans',\n", " 'les',\n", " 'régions',\n", " 'de',\n", " 'Koursk',\n", " 'et',\n", " 'de',\n", " 'Belgorod'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'les',\n", " 'combattants',\n", " 'russes',\n", " 'pro-ukrainiens',\n", " 'disent',\n", " 'lancer',\n", " '«\\xa0des',\n", " 'frappes',\n", " 'massives\\xa0»',\n", " 'dans',\n", " 'les',\n", " 'régions',\n", " 'de',\n", " 'Koursk',\n", " 'et',\n", " 'de',\n", " 'Belgorod'],\n", " 'sentence': ['En direct, guerre en\\xa0Ukraine\\xa0: les combattants russes pro-ukrainiens disent lancer «\\xa0des frappes massives\\xa0» dans les régions de Koursk et de Belgorod']},\n", " {'entities': ['Coco', 'Gaza'],\n", " 'document': ['La dessinatrice Coco menacée de mort pour un dessin sur la famine à Gaza'],\n", " 'token': ['La',\n", " 'dessinatrice',\n", " 'Coco',\n", " 'menacée',\n", " 'de',\n", " 'mort',\n", " 'pour',\n", " 'un',\n", " 'dessin',\n", " 'sur',\n", " 'la',\n", " 'famine',\n", " 'à',\n", " 'Gaza'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC'],\n", " 'embeddings': ['La',\n", " 'dessinatrice',\n", " 'Coco',\n", " 'menacée',\n", " 'de',\n", " 'mort',\n", " 'pour',\n", " 'un',\n", " 'dessin',\n", " 'sur',\n", " 'la',\n", " 
'famine',\n", " 'à',\n", " 'Gaza'],\n", " 'sentence': ['La dessinatrice Coco menacée de mort pour un dessin sur la famine à Gaza']},\n", " {'entities': ['Sciences Po', 'Chloé Morin'],\n", " 'document': [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"],\n", " 'token': ['«Il',\n", " 'y',\n", " 'a',\n", " 'un',\n", " 'problème',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " ':',\n", " 'la',\n", " 'moitié',\n", " 'des',\n", " 'étudiants',\n", " 'sont',\n", " 'étrangers»',\n", " ',',\n", " 'affirme',\n", " \"l'essayiste\",\n", " 'Chloé',\n", " 'Morin'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['«Il',\n", " 'y',\n", " 'a',\n", " 'un',\n", " 'problème',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " ':',\n", " 'la',\n", " 'moitié',\n", " 'des',\n", " 'étudiants',\n", " 'sont',\n", " 'étrangers»',\n", " ',',\n", " 'affirme',\n", " \"l'essayiste\",\n", " 'Chloé',\n", " 'Morin'],\n", " 'sentence': [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"]},\n", " {'entities': ['Etats-Unis', 'Sénat', 'Chuck Schumer', 'Benyamin Nétanyahou'],\n", " 'document': ['Aux Etats-Unis, le chef de file démocrate au Sénat, Chuck Schumer, désigne Benyamin Nétanyahou comme un «\\xa0obstacle pour la paix\\xa0»'],\n", " 'token': ['Aux',\n", " 'Etats-Unis',\n", " ',',\n", " 'le',\n", " 'chef',\n", " 'de',\n", " 'file',\n", " 'démocrate',\n", " 'au',\n", " 'Sénat',\n", " ',',\n", " 'Chuck',\n", " 'Schumer',\n", " ',',\n", " 'désigne',\n", " 'Benyamin',\n", " 'Nétanyahou',\n", " 'comme',\n", " 'un',\n", " '«\\xa0obstacle',\n", " 'pour',\n", " 'la',\n", " 'paix\\xa0»'],\n", " 'ner': ['O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Aux',\n", " 'Etats-Unis',\n", " ',',\n", " 'le',\n", " 'chef',\n", " 'de',\n", " 'file',\n", " 'démocrate',\n", " 'au',\n", " 'Sénat',\n", " ',',\n", " 'Chuck',\n", " 'Schumer',\n", " ',',\n", " 'désigne',\n", " 'Benyamin',\n", " 'Nétanyahou',\n", " 'comme',\n", " 'un',\n", " '«\\xa0obstacle',\n", " 'pour',\n", " 'la',\n", " 'paix\\xa0»'],\n", " 'sentence': ['Aux Etats-Unis, le chef de file démocrate au Sénat, Chuck Schumer, désigne Benyamin Nétanyahou comme un «\\xa0obstacle pour la paix\\xa0»']},\n", " {'entities': ['Saint-Malo'],\n", " 'document': ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)'],\n", " 'token': ['«Marée',\n", " 'du',\n", " 'siècle»',\n", " 'à',\n", " 'Saint-Malo',\n", " ':',\n", " 'les',\n", " 'images',\n", " 'impressionnantes',\n", " 'de',\n", " 'la',\n", " 'montée',\n", " 'des',\n", " 'eaux',\n", " '(',\n", " 'vidéo',\n", " ')'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Marée',\n", " 'du',\n", " 'siècle»',\n", " 'à',\n", " 'Saint-Malo',\n", " ':',\n", " 'les',\n", " 'images',\n", " 'impressionnantes',\n", " 'de',\n", " 'la',\n", " 'montée',\n", " 'des',\n", " 'eaux',\n", " '(',\n", " 'vidéo',\n", " ')'],\n", " 'sentence': ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)']},\n", " {'entities': ['Etats-Unis', 'Sénat', 'Chuck Schumer', 'Benyamin Nétanyahou'],\n", " 'document': ['Aux Etats-Unis, le chef de file démocrate au Sénat, Chuck Schumer, désigne Benyamin Nétanyahou comme un «\\xa0obstacle pour la paix\\xa0»'],\n", " 'token': ['Aux',\n", " 
'Etats-Unis',\n", " ',',\n", " 'le',\n", " 'chef',\n", " 'de',\n", " 'file',\n", " 'démocrate',\n", " 'au',\n", " 'Sénat',\n", " ',',\n", " 'Chuck',\n", " 'Schumer',\n", " ',',\n", " 'désigne',\n", " 'Benyamin',\n", " 'Nétanyahou',\n", " 'comme',\n", " 'un',\n", " '«\\xa0obstacle',\n", " 'pour',\n", " 'la',\n", " 'paix\\xa0»'],\n", " 'ner': ['O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Aux',\n", " 'Etats-Unis',\n", " ',',\n", " 'le',\n", " 'chef',\n", " 'de',\n", " 'file',\n", " 'démocrate',\n", " 'au',\n", " 'Sénat',\n", " ',',\n", " 'Chuck',\n", " 'Schumer',\n", " ',',\n", " 'désigne',\n", " 'Benyamin',\n", " 'Nétanyahou',\n", " 'comme',\n", " 'un',\n", " '«\\xa0obstacle',\n", " 'pour',\n", " 'la',\n", " 'paix\\xa0»'],\n", " 'sentence': ['Aux Etats-Unis, le chef de file démocrate au Sénat, Chuck Schumer, désigne Benyamin Nétanyahou comme un «\\xa0obstacle pour la paix\\xa0»']},\n", " {'entities': ['Vincent Bolloré'],\n", " 'document': ['IVG\\xa0: pour Vincent Bolloré, auditionné en commission d’enquête, deux «\\xa0libertés\\xa0» se «\\xa0heurtent\\xa0», dont celle «\\xa0des enfants à vivre\\xa0»'],\n", " 'token': ['IVG\\xa0',\n", " ':',\n", " 'pour',\n", " 'Vincent',\n", " 'Bolloré',\n", " ',',\n", " 'auditionné',\n", " 'en',\n", " 'commission',\n", " 'd’enquête',\n", " ',',\n", " 'deux',\n", " '«\\xa0libertés\\xa0»',\n", " 'se',\n", " '«\\xa0heurtent\\xa0»',\n", " ',',\n", " 'dont',\n", " 'celle',\n", " '«\\xa0des',\n", " 'enfants',\n", " 'à',\n", " 'vivre\\xa0»'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'O'],\n", " 'embeddings': ['IVG\\xa0',\n", " ':',\n", " 'pour',\n", " 'Vincent',\n", " 'Bolloré',\n", " ',',\n", " 'auditionné',\n", " 'en',\n", " 'commission',\n", " 'd’enquête',\n", " ',',\n", " 'deux',\n", " '«\\xa0libertés\\xa0»',\n", " 'se',\n", " '«\\xa0heurtent\\xa0»',\n", " ',',\n", " 'dont',\n", " 'celle',\n", " '«\\xa0des',\n", " 'enfants',\n", " 'à',\n", " 'vivre\\xa0»'],\n", " 'sentence': ['IVG\\xa0: pour Vincent Bolloré, auditionné en commission d’enquête, deux «\\xa0libertés\\xa0» se «\\xa0heurtent\\xa0», dont celle «\\xa0des enfants à vivre\\xa0»']},\n", " {'entities': ['Guerre en Ukraine\\xa0', 'Macron'],\n", " 'document': ['Guerre en Ukraine\\xa0: la métamorphose d’Emmanuel Macron, colombe devenue faucon'],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'la',\n", " 'métamorphose',\n", " 'd’Emmanuel',\n", " 'Macron',\n", " ',',\n", " 'colombe',\n", " 'devenue',\n", " 'faucon'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'la',\n", " 'métamorphose',\n", " 'd’Emmanuel',\n", " 'Macron',\n", " ',',\n", " 'colombe',\n", " 'devenue',\n", " 'faucon'],\n", " 'sentence': ['Guerre en Ukraine\\xa0: la métamorphose d’Emmanuel Macron, colombe devenue faucon']},\n", " {'entities': ['UFC', 'Benoît Saint-Denis', 'Dustin Poirier'],\n", " 'document': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " 'token': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'ner': ['I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'sentence': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier']},\n", " {'entities': ['L’étrange', 'Jordan Bardella'],\n", " 'document': ['L’étrange genèse du livre de Jordan Bardella'],\n", " 'token': ['L’étrange', 'genèse', 'du', 'livre', 'de', 'Jordan', 'Bardella'],\n", " 'ner': ['I-PER', 'O', 'O', 'O', 'O', 'I-PER', 'I-PER'],\n", " 'embeddings': ['L’étrange',\n", " 'genèse',\n", " 'du',\n", " 'livre',\n", " 'de',\n", " 'Jordan',\n", " 'Bardella'],\n", " 'sentence': ['L’étrange genèse du livre de Jordan Bardella']},\n", " {'entities': ['Saint-Malo'],\n", " 'document': ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)'],\n", " 'token': ['«Marée',\n", " 'du',\n", " 'siècle»',\n", " 'à',\n", " 'Saint-Malo',\n", " ':',\n", " 'les',\n", " 'images',\n", " 'impressionnantes',\n", " 'de',\n", " 'la',\n", " 'montée',\n", " 'des',\n", " 'eaux',\n", " '(',\n", " 'vidéo',\n", " ')'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Marée',\n", " 'du',\n", " 'siècle»',\n", " 'à',\n", " 'Saint-Malo',\n", " ':',\n", " 'les',\n", " 'images',\n", " 'impressionnantes',\n", " 'de',\n", " 'la',\n", " 'montée',\n", " 'des',\n", " 'eaux',\n", " '(',\n", " 'vidéo',\n", " ')'],\n", " 'sentence': ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)']},\n", " {'entities': ['Algérie\\xa0', 'Antoine de Maximy'],\n", " 'document': ['Algérie\\xa0: Antoine de Maximy a eu du mal à dormir chez eux'],\n", " 'token': 
['Algérie\\xa0',\n", " ':',\n", " 'Antoine',\n", " 'de',\n", " 'Maximy',\n", " 'a',\n", " 'eu',\n", " 'du',\n", " 'mal',\n", " 'à',\n", " 'dormir',\n", " 'chez',\n", " 'eux'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Algérie\\xa0',\n", " ':',\n", " 'Antoine',\n", " 'de',\n", " 'Maximy',\n", " 'a',\n", " 'eu',\n", " 'du',\n", " 'mal',\n", " 'à',\n", " 'dormir',\n", " 'chez',\n", " 'eux'],\n", " 'sentence': ['Algérie\\xa0: Antoine de Maximy a eu du mal à dormir chez eux']},\n", " {'entities': ['Laurent Nuñez'],\n", " 'document': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " 'token': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'sentence': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars']},\n", " {'entities': ['Algérie\\xa0', 'Antoine de Maximy'],\n", " 'document': 
['Algérie\\xa0: Antoine de Maximy a eu du mal à dormir chez eux'],\n", " 'token': ['Algérie\\xa0',\n", " ':',\n", " 'Antoine',\n", " 'de',\n", " 'Maximy',\n", " 'a',\n", " 'eu',\n", " 'du',\n", " 'mal',\n", " 'à',\n", " 'dormir',\n", " 'chez',\n", " 'eux'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Algérie\\xa0',\n", " ':',\n", " 'Antoine',\n", " 'de',\n", " 'Maximy',\n", " 'a',\n", " 'eu',\n", " 'du',\n", " 'mal',\n", " 'à',\n", " 'dormir',\n", " 'chez',\n", " 'eux'],\n", " 'sentence': ['Algérie\\xa0: Antoine de Maximy a eu du mal à dormir chez eux']},\n", " {'entities': ['Marco Mouly'],\n", " 'document': ['Marco Mouly, «\\xa0roi de l’arnaque\\xa0» à la «\\xa0taxe carbone\\xa0», se rend au tribunal en vue de son incarcération'],\n", " 'token': ['Marco',\n", " 'Mouly',\n", " ',',\n", " '«\\xa0roi',\n", " 'de',\n", " 'l’arnaque\\xa0»',\n", " 'à',\n", " 'la',\n", " '«\\xa0taxe',\n", " 'carbone\\xa0»',\n", " ',',\n", " 'se',\n", " 'rend',\n", " 'au',\n", " 'tribunal',\n", " 'en',\n", " 'vue',\n", " 'de',\n", " 'son',\n", " 'incarcération'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Marco',\n", " 'Mouly',\n", " ',',\n", " '«\\xa0roi',\n", " 'de',\n", " 'l’arnaque\\xa0»',\n", " 'à',\n", " 'la',\n", " '«\\xa0taxe',\n", " 'carbone\\xa0»',\n", " ',',\n", " 'se',\n", " 'rend',\n", " 'au',\n", " 'tribunal',\n", " 'en',\n", " 'vue',\n", " 'de',\n", " 'son',\n", " 'incarcération'],\n", " 'sentence': ['Marco Mouly, «\\xa0roi de l’arnaque\\xa0» à la «\\xa0taxe carbone\\xa0», se rend au tribunal en vue de son incarcération']},\n", " {'entities': ['en\\xa0Ukraine\\xa0', 'Belgorod', 'Le point'],\n", " 'document': ['En 
direct, guerre en\\xa0Ukraine\\xa0: bombardements à Belgorod, mort de deux civils... Le point sur la situation'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'bombardements',\n", " 'à',\n", " 'Belgorod',\n", " ',',\n", " 'mort',\n", " 'de',\n", " 'deux',\n", " 'civils',\n", " '...',\n", " 'Le',\n", " 'point',\n", " 'sur',\n", " 'la',\n", " 'situation'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'bombardements',\n", " 'à',\n", " 'Belgorod',\n", " ',',\n", " 'mort',\n", " 'de',\n", " 'deux',\n", " 'civils',\n", " '...',\n", " 'Le',\n", " 'point',\n", " 'sur',\n", " 'la',\n", " 'situation'],\n", " 'sentence': ['En direct, guerre en\\xa0Ukraine\\xa0: bombardements à Belgorod, mort de deux civils...',\n", " 'Le point sur la situation']},\n", " {'entities': ['Ligue des champions', 'PSG', 'FC Barcelone', 'Suivez'],\n", " 'document': ['Tirage au sort de la Ligue des champions : le PSG face au FC Barcelone en quarts de finale... 
Suivez et commentez avec nous'],\n", " 'token': ['Tirage',\n", " 'au',\n", " 'sort',\n", " 'de',\n", " 'la',\n", " 'Ligue',\n", " 'des',\n", " 'champions',\n", " ':',\n", " 'le',\n", " 'PSG',\n", " 'face',\n", " 'au',\n", " 'FC',\n", " 'Barcelone',\n", " 'en',\n", " 'quarts',\n", " 'de',\n", " 'finale',\n", " '...',\n", " 'Suivez',\n", " 'et',\n", " 'commentez',\n", " 'avec',\n", " 'nous'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Tirage',\n", " 'au',\n", " 'sort',\n", " 'de',\n", " 'la',\n", " 'Ligue',\n", " 'des',\n", " 'champions',\n", " ':',\n", " 'le',\n", " 'PSG',\n", " 'face',\n", " 'au',\n", " 'FC',\n", " 'Barcelone',\n", " 'en',\n", " 'quarts',\n", " 'de',\n", " 'finale',\n", " '...',\n", " 'Suivez',\n", " 'et',\n", " 'commentez',\n", " 'avec',\n", " 'nous'],\n", " 'sentence': ['Tirage au sort de la Ligue des champions : le PSG face au FC Barcelone en quarts de finale...',\n", " 'Suivez et commentez avec nous']},\n", " {'entities': ['Jean-François Achilli', 'Franceinfo'],\n", " 'document': ['Le journaliste Jean-François Achilli est suspendu de l’antenne de Franceinfo «\\xa0à titre conservatoire\\xa0»'],\n", " 'token': ['Le',\n", " 'journaliste',\n", " 'Jean-François',\n", " 'Achilli',\n", " 'est',\n", " 'suspendu',\n", " 'de',\n", " 'l’antenne',\n", " 'de',\n", " 'Franceinfo',\n", " '«\\xa0à',\n", " 'titre',\n", " 'conservatoire\\xa0»'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Le',\n", " 'journaliste',\n", " 'Jean-François',\n", " 'Achilli',\n", " 'est',\n", " 'suspendu',\n", " 'de',\n", " 'l’antenne',\n", " 'de',\n", " 
'Franceinfo',\n", " '«\\xa0à',\n", " 'titre',\n", " 'conservatoire\\xa0»'],\n", " 'sentence': ['Le journaliste Jean-François Achilli est suspendu de l’antenne de Franceinfo «\\xa0à titre conservatoire\\xa0»']},\n", " {'entities': ['Sciences Po', 'Chloé Morin'],\n", " 'document': [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"],\n", " 'token': ['«Il',\n", " 'y',\n", " 'a',\n", " 'un',\n", " 'problème',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " ':',\n", " 'la',\n", " 'moitié',\n", " 'des',\n", " 'étudiants',\n", " 'sont',\n", " 'étrangers»',\n", " ',',\n", " 'affirme',\n", " \"l'essayiste\",\n", " 'Chloé',\n", " 'Morin'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['«Il',\n", " 'y',\n", " 'a',\n", " 'un',\n", " 'problème',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " ':',\n", " 'la',\n", " 'moitié',\n", " 'des',\n", " 'étudiants',\n", " 'sont',\n", " 'étrangers»',\n", " ',',\n", " 'affirme',\n", " \"l'essayiste\",\n", " 'Chloé',\n", " 'Morin'],\n", " 'sentence': [\"«Il y a un problème à Sciences Po : la moitié des étudiants sont étrangers», affirme l'essayiste Chloé Morin\"]},\n", " {'entities': ['Gérard Miller', 'Paris 8'],\n", " 'document': [\"Affaire Gérard Miller : une alerte sur son comportement avait été lancée auprès de la direction de l'université Paris 8\"],\n", " 'token': ['Affaire',\n", " 'Gérard',\n", " 'Miller',\n", " ':',\n", " 'une',\n", " 'alerte',\n", " 'sur',\n", " 'son',\n", " 'comportement',\n", " 'avait',\n", " 'été',\n", " 'lancée',\n", " 'auprès',\n", " 'de',\n", " 'la',\n", " 'direction',\n", " 'de',\n", " \"l'université\",\n", " 'Paris',\n", " '8'],\n", " 'ner': ['O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'I-LOC'],\n", " 'embeddings': ['Affaire',\n", " 'Gérard',\n", " 'Miller',\n", " ':',\n", " 'une',\n", " 'alerte',\n", " 'sur',\n", " 'son',\n", " 'comportement',\n", " 'avait',\n", " 'été',\n", " 'lancée',\n", " 'auprès',\n", " 'de',\n", " 'la',\n", " 'direction',\n", " 'de',\n", " \"l'université\",\n", " 'Paris',\n", " '8'],\n", " 'sentence': [\"Affaire Gérard Miller : une alerte sur son comportement avait été lancée auprès de la direction de l'université Paris 8\"]},\n", " {'entities': ['Vincent Bolloré'],\n", " 'document': ['IVG\\xa0: pour Vincent Bolloré, auditionné en commission d’enquête, deux «\\xa0libertés\\xa0» se «\\xa0heurtent\\xa0», dont celle «\\xa0des enfants à vivre\\xa0»'],\n", " 'token': ['IVG\\xa0',\n", " ':',\n", " 'pour',\n", " 'Vincent',\n", " 'Bolloré',\n", " ',',\n", " 'auditionné',\n", " 'en',\n", " 'commission',\n", " 'd’enquête',\n", " ',',\n", " 'deux',\n", " '«\\xa0libertés\\xa0»',\n", " 'se',\n", " '«\\xa0heurtent\\xa0»',\n", " ',',\n", " 'dont',\n", " 'celle',\n", " '«\\xa0des',\n", " 'enfants',\n", " 'à',\n", " 'vivre\\xa0»'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['IVG\\xa0',\n", " ':',\n", " 'pour',\n", " 'Vincent',\n", " 'Bolloré',\n", " ',',\n", " 'auditionné',\n", " 'en',\n", " 'commission',\n", " 'd’enquête',\n", " ',',\n", " 'deux',\n", " '«\\xa0libertés\\xa0»',\n", " 'se',\n", " '«\\xa0heurtent\\xa0»',\n", " ',',\n", " 'dont',\n", " 'celle',\n", " '«\\xa0des',\n", " 'enfants',\n", " 'à',\n", " 'vivre\\xa0»'],\n", " 'sentence': ['IVG\\xa0: pour Vincent Bolloré, auditionné en commission d’enquête, deux «\\xa0libertés\\xa0» se «\\xa0heurtent\\xa0», dont celle 
«\\xa0des enfants à vivre\\xa0»']},\n", " {'entities': ['Laurent Nuñez'],\n", " 'document': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " 'token': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'sentence': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars']},\n", " {'entities': ['Enzo'],\n", " 'document': ['Enzo, 15\\xa0ans, poignardé à mort à l’été\\xa02023\\xa0: les ressorts politiques d’un fait divers'],\n", " 'token': ['Enzo',\n", " ',',\n", " '15\\xa0ans',\n", " ',',\n", " 'poignardé',\n", " 'à',\n", " 'mort',\n", " 'à',\n", " 'l’été\\xa02023\\xa0',\n", " ':',\n", " 'les',\n", " 'ressorts',\n", " 'politiques',\n", " 'd’un',\n", " 'fait',\n", " 'divers'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Enzo',\n", " ',',\n", " '15\\xa0ans',\n", " 
',',\n", " 'poignardé',\n", " 'à',\n", " 'mort',\n", " 'à',\n", " 'l’été\\xa02023\\xa0',\n", " ':',\n", " 'les',\n", " 'ressorts',\n", " 'politiques',\n", " 'd’un',\n", " 'fait',\n", " 'divers'],\n", " 'sentence': ['Enzo, 15\\xa0ans, poignardé à mort à l’été\\xa02023\\xa0: les ressorts politiques d’un fait divers']},\n", " {'entities': ['«J’ai'],\n", " 'document': ['«J’ai besoin de ma voiture» : plusieurs véhicules désossés dans un parking parisien, les propriétaires atterrés'],\n", " 'token': ['«J’ai',\n", " 'besoin',\n", " 'de',\n", " 'ma',\n", " 'voiture»',\n", " ':',\n", " 'plusieurs',\n", " 'véhicules',\n", " 'désossés',\n", " 'dans',\n", " 'un',\n", " 'parking',\n", " 'parisien',\n", " ',',\n", " 'les',\n", " 'propriétaires',\n", " 'atterrés'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«J’ai',\n", " 'besoin',\n", " 'de',\n", " 'ma',\n", " 'voiture»',\n", " ':',\n", " 'plusieurs',\n", " 'véhicules',\n", " 'désossés',\n", " 'dans',\n", " 'un',\n", " 'parking',\n", " 'parisien',\n", " ',',\n", " 'les',\n", " 'propriétaires',\n", " 'atterrés'],\n", " 'sentence': ['«J’ai besoin de ma voiture» : plusieurs véhicules désossés dans un parking parisien, les propriétaires atterrés']},\n", " {'entities': ['BFM-TV', 'RMC', 'Rodolphe Saadé'],\n", " 'document': ['BFM-TV et RMC vendus à l’armateur Rodolphe Saadé'],\n", " 'token': ['BFM-TV',\n", " 'et',\n", " 'RMC',\n", " 'vendus',\n", " 'à',\n", " 'l’armateur',\n", " 'Rodolphe',\n", " 'Saadé'],\n", " 'ner': ['I-MISC', 'O', 'I-ORG', 'O', 'O', 'O', 'I-PER', 'I-PER'],\n", " 'embeddings': ['BFM-TV',\n", " 'et',\n", " 'RMC',\n", " 'vendus',\n", " 'à',\n", " 'l’armateur',\n", " 'Rodolphe',\n", " 'Saadé'],\n", " 'sentence': ['BFM-TV et RMC vendus à l’armateur Rodolphe Saadé']},\n", " {'entities': ['«\\xa0Dubaï Papers\\xa0»\\xa0'],\n", " 'document': 
['«\\xa0Dubaï Papers\\xa0»\\xa0: une famille française qui avait caché plus de 100\\xa0millions d’euros à l’étranger condamnée pour fraude fiscale'],\n", " 'token': ['«\\xa0Dubaï',\n", " 'Papers\\xa0»\\xa0',\n", " ':',\n", " 'une',\n", " 'famille',\n", " 'française',\n", " 'qui',\n", " 'avait',\n", " 'caché',\n", " 'plus',\n", " 'de',\n", " '100\\xa0millions',\n", " 'd’euros',\n", " 'à',\n", " 'l’étranger',\n", " 'condamnée',\n", " 'pour',\n", " 'fraude',\n", " 'fiscale'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«\\xa0Dubaï',\n", " 'Papers\\xa0»\\xa0',\n", " ':',\n", " 'une',\n", " 'famille',\n", " 'française',\n", " 'qui',\n", " 'avait',\n", " 'caché',\n", " 'plus',\n", " 'de',\n", " '100\\xa0millions',\n", " 'd’euros',\n", " 'à',\n", " 'l’étranger',\n", " 'condamnée',\n", " 'pour',\n", " 'fraude',\n", " 'fiscale'],\n", " 'sentence': ['«\\xa0Dubaï Papers\\xa0»\\xa0: une famille française qui avait caché plus de 100\\xa0millions d’euros à l’étranger condamnée pour fraude fiscale']},\n", " {'entities': ['Jean-François Achilli', 'Franceinfo'],\n", " 'document': ['Le journaliste Jean-François Achilli est suspendu de l’antenne de Franceinfo «\\xa0à titre conservatoire\\xa0»'],\n", " 'token': ['Le',\n", " 'journaliste',\n", " 'Jean-François',\n", " 'Achilli',\n", " 'est',\n", " 'suspendu',\n", " 'de',\n", " 'l’antenne',\n", " 'de',\n", " 'Franceinfo',\n", " '«\\xa0à',\n", " 'titre',\n", " 'conservatoire\\xa0»'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Le',\n", " 'journaliste',\n", " 'Jean-François',\n", " 'Achilli',\n", " 'est',\n", " 'suspendu',\n", " 'de',\n", " 'l’antenne',\n", " 'de',\n", " 'Franceinfo',\n", " 
'«\\xa0à',\n", " 'titre',\n", " 'conservatoire\\xa0»'],\n", " 'sentence': ['Le journaliste Jean-François Achilli est suspendu de l’antenne de Franceinfo «\\xa0à titre conservatoire\\xa0»']},\n", " {'entities': ['Guerre en Ukraine', 'Macron', 'France 2', 'TF1'],\n", " 'document': [\"Guerre en Ukraine : ce qu'il faut retenir de l'interview d'Emmanuel Macron sur France 2 et TF1\"],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'ce',\n", " \"qu'il\",\n", " 'faut',\n", " 'retenir',\n", " 'de',\n", " \"l'interview\",\n", " \"d'Emmanuel\",\n", " 'Macron',\n", " 'sur',\n", " 'France',\n", " '2',\n", " 'et',\n", " 'TF1'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'I-ORG'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'ce',\n", " \"qu'il\",\n", " 'faut',\n", " 'retenir',\n", " 'de',\n", " \"l'interview\",\n", " \"d'Emmanuel\",\n", " 'Macron',\n", " 'sur',\n", " 'France',\n", " '2',\n", " 'et',\n", " 'TF1'],\n", " 'sentence': [\"Guerre en Ukraine : ce qu'il faut retenir de l'interview d'Emmanuel Macron sur France 2 et TF1\"]},\n", " {'entities': ['Teddy Riner', 'Kylian Mbappé', 'JO'],\n", " 'document': ['Plusieurs stars du sport français, dont Teddy Riner ou Kylian Mbappé, écartées par les critères de désignation des porte-drapeaux des JO'],\n", " 'token': ['Plusieurs',\n", " 'stars',\n", " 'du',\n", " 'sport',\n", " 'français',\n", " ',',\n", " 'dont',\n", " 'Teddy',\n", " 'Riner',\n", " 'ou',\n", " 'Kylian',\n", " 'Mbappé',\n", " ',',\n", " 'écartées',\n", " 'par',\n", " 'les',\n", " 'critères',\n", " 'de',\n", " 'désignation',\n", " 'des',\n", " 'porte-drapeaux',\n", " 'des',\n", " 'JO'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", 
" 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER'],\n", " 'embeddings': ['Plusieurs',\n", " 'stars',\n", " 'du',\n", " 'sport',\n", " 'français',\n", " ',',\n", " 'dont',\n", " 'Teddy',\n", " 'Riner',\n", " 'ou',\n", " 'Kylian',\n", " 'Mbappé',\n", " ',',\n", " 'écartées',\n", " 'par',\n", " 'les',\n", " 'critères',\n", " 'de',\n", " 'désignation',\n", " 'des',\n", " 'porte-drapeaux',\n", " 'des',\n", " 'JO'],\n", " 'sentence': ['Plusieurs stars du sport français, dont Teddy Riner ou Kylian Mbappé, écartées par les critères de désignation des porte-drapeaux des JO']},\n", " {'entities': ['Saint-Malo'],\n", " 'document': ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)'],\n", " 'token': ['«Marée',\n", " 'du',\n", " 'siècle»',\n", " 'à',\n", " 'Saint-Malo',\n", " ':',\n", " 'les',\n", " 'images',\n", " 'impressionnantes',\n", " 'de',\n", " 'la',\n", " 'montée',\n", " 'des',\n", " 'eaux',\n", " '(',\n", " 'vidéo',\n", " ')'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Marée',\n", " 'du',\n", " 'siècle»',\n", " 'à',\n", " 'Saint-Malo',\n", " ':',\n", " 'les',\n", " 'images',\n", " 'impressionnantes',\n", " 'de',\n", " 'la',\n", " 'montée',\n", " 'des',\n", " 'eaux',\n", " '(',\n", " 'vidéo',\n", " ')'],\n", " 'sentence': ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)']},\n", " {'entities': ['Guerre en Ukraine', 'Vladimir Poutine', 'Macron', 'Russie'],\n", " 'document': ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie'],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'réagit',\n", " 'aux',\n", " 'propos',\n", " 'd’Emmanuel',\n", " 
'Macron',\n", " 'concernant',\n", " 'la',\n", " 'Russie'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'réagit',\n", " 'aux',\n", " 'propos',\n", " 'd’Emmanuel',\n", " 'Macron',\n", " 'concernant',\n", " 'la',\n", " 'Russie'],\n", " 'sentence': ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie']},\n", " {'entities': ['Sciences Po Paris'],\n", " 'document': [\"On vous explique la polémique autour d'une mobilisation pro-palestinienne à Sciences Po Paris\"],\n", " 'token': ['On',\n", " 'vous',\n", " 'explique',\n", " 'la',\n", " 'polémique',\n", " 'autour',\n", " \"d'une\",\n", " 'mobilisation',\n", " 'pro-palestinienne',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " 'Paris'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'I-ORG'],\n", " 'embeddings': ['On',\n", " 'vous',\n", " 'explique',\n", " 'la',\n", " 'polémique',\n", " 'autour',\n", " \"d'une\",\n", " 'mobilisation',\n", " 'pro-palestinienne',\n", " 'à',\n", " 'Sciences',\n", " 'Po',\n", " 'Paris'],\n", " 'sentence': [\"On vous explique la polémique autour d'une mobilisation pro-palestinienne à Sciences Po Paris\"]},\n", " {'entities': ['Laurent Nuñez'],\n", " 'document': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " 'token': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 
'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'sentence': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars']},\n", " {'entities': ['«\\xa0Dubaï Papers\\xa0»\\xa0'],\n", " 'document': ['«\\xa0Dubaï Papers\\xa0»\\xa0: une famille française qui avait caché plus de 100\\xa0millions d’euros à l’étranger condamnée pour fraude fiscale'],\n", " 'token': ['«\\xa0Dubaï',\n", " 'Papers\\xa0»\\xa0',\n", " ':',\n", " 'une',\n", " 'famille',\n", " 'française',\n", " 'qui',\n", " 'avait',\n", " 'caché',\n", " 'plus',\n", " 'de',\n", " '100\\xa0millions',\n", " 'd’euros',\n", " 'à',\n", " 'l’étranger',\n", " 'condamnée',\n", " 'pour',\n", " 'fraude',\n", " 'fiscale'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«\\xa0Dubaï',\n", " 'Papers\\xa0»\\xa0',\n", " ':',\n", " 'une',\n", " 'famille',\n", " 'française',\n", " 'qui',\n", " 'avait',\n", " 'caché',\n", " 'plus',\n", " 'de',\n", " '100\\xa0millions',\n", " 'd’euros',\n", " 'à',\n", " 'l’étranger',\n", " 'condamnée',\n", " 'pour',\n", " 'fraude',\n", " 'fiscale'],\n", " 'sentence': ['«\\xa0Dubaï Papers\\xa0»\\xa0: une famille française qui 
avait caché plus de 100\\xa0millions d’euros à l’étranger condamnée pour fraude fiscale']},\n", " {'entities': ['«J’ai'],\n", " 'document': ['«J’ai besoin de ma voiture» : plusieurs véhicules désossés dans un parking parisien, les propriétaires atterrés'],\n", " 'token': ['«J’ai',\n", " 'besoin',\n", " 'de',\n", " 'ma',\n", " 'voiture»',\n", " ':',\n", " 'plusieurs',\n", " 'véhicules',\n", " 'désossés',\n", " 'dans',\n", " 'un',\n", " 'parking',\n", " 'parisien',\n", " ',',\n", " 'les',\n", " 'propriétaires',\n", " 'atterrés'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«J’ai',\n", " 'besoin',\n", " 'de',\n", " 'ma',\n", " 'voiture»',\n", " ':',\n", " 'plusieurs',\n", " 'véhicules',\n", " 'désossés',\n", " 'dans',\n", " 'un',\n", " 'parking',\n", " 'parisien',\n", " ',',\n", " 'les',\n", " 'propriétaires',\n", " 'atterrés'],\n", " 'sentence': ['«J’ai besoin de ma voiture» : plusieurs véhicules désossés dans un parking parisien, les propriétaires atterrés']},\n", " {'entities': ['guerre en\\xa0Ukraine\\xa0', 'Vladimir Poutine'],\n", " 'document': ['En direct, guerre en\\xa0Ukraine\\xa0: «\\xa0D’un point de\\xa0vue militaro-technique, nous sommes évidemment prêts\\xa0» à\\xa0une guerre nucléaire, rappelle Vladimir Poutine'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " '«\\xa0D’un',\n", " 'point',\n", " 'de\\xa0vue',\n", " 'militaro-technique',\n", " ',',\n", " 'nous',\n", " 'sommes',\n", " 'évidemment',\n", " 'prêts\\xa0»',\n", " 'à\\xa0une',\n", " 'guerre',\n", " 'nucléaire',\n", " ',',\n", " 'rappelle',\n", " 'Vladimir',\n", " 'Poutine'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " '«\\xa0D’un',\n", " 'point',\n", " 'de\\xa0vue',\n", " 'militaro-technique',\n", " ',',\n", " 'nous',\n", " 'sommes',\n", " 'évidemment',\n", " 'prêts\\xa0»',\n", " 'à\\xa0une',\n", " 'guerre',\n", " 'nucléaire',\n", " ',',\n", " 'rappelle',\n", " 'Vladimir',\n", " 'Poutine'],\n", " 'sentence': ['En direct, guerre en\\xa0Ukraine\\xa0: «\\xa0D’un point de\\xa0vue militaro-technique, nous sommes évidemment prêts\\xa0» à\\xa0une guerre nucléaire, rappelle Vladimir Poutine']},\n", " {'entities': ['Starship'],\n", " 'document': ['Pour son troisième vol, le Starship se\\xa0désintègre lors de\\xa0sa\\xa0rentrée dans l’atmosphère'],\n", " 'token': ['Pour',\n", " 'son',\n", " 'troisième',\n", " 'vol',\n", " ',',\n", " 'le',\n", " 'Starship',\n", " 'se\\xa0désintègre',\n", " 'lors',\n", " 'de\\xa0sa\\xa0rentrée',\n", " 'dans',\n", " 'l’atmosphère'],\n", " 'ner': ['O', 'O', 'O', 'O', 'O', 'O', 'I-MISC', 'O', 'O', 'O', 'O', 'O'],\n", " 'embeddings': ['Pour',\n", " 'son',\n", " 'troisième',\n", " 'vol',\n", " ',',\n", " 'le',\n", " 'Starship',\n", " 'se\\xa0désintègre',\n", " 'lors',\n", " 'de\\xa0sa\\xa0rentrée',\n", " 'dans',\n", " 'l’atmosphère'],\n", " 'sentence': ['Pour son troisième vol, le Starship se\\xa0désintègre lors de\\xa0sa\\xa0rentrée dans l’atmosphère']},\n", " {'entities': ['Enzo'],\n", " 'document': ['Enzo, 15\\xa0ans, poignardé à mort à l’été\\xa02023\\xa0: les ressorts politiques d’un fait divers'],\n", " 'token': ['Enzo',\n", " ',',\n", " '15\\xa0ans',\n", " ',',\n", " 'poignardé',\n", " 'à',\n", " 'mort',\n", " 'à',\n", " 'l’été\\xa02023\\xa0',\n", " ':',\n", " 'les',\n", " 'ressorts',\n", " 'politiques',\n", " 'd’un',\n", " 'fait',\n", " 'divers'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Enzo',\n", " ',',\n", " '15\\xa0ans',\n", " ',',\n", " 'poignardé',\n", " 'à',\n", " 'mort',\n", " 'à',\n", " 'l’été\\xa02023\\xa0',\n", " ':',\n", " 'les',\n", " 'ressorts',\n", " 'politiques',\n", " 'd’un',\n", " 'fait',\n", " 'divers'],\n", " 'sentence': ['Enzo, 15\\xa0ans, poignardé à mort à l’été\\xa02023\\xa0: les ressorts politiques d’un fait divers']},\n", " {'entities': ['Commission européenne'],\n", " 'document': ['«\\xa0La taxe proposée par la Commission européenne rapporterait chaque année jusqu’à 57\\xa0milliards d’euros\\xa0»'],\n", " 'token': ['«\\xa0La',\n", " 'taxe',\n", " 'proposée',\n", " 'par',\n", " 'la',\n", " 'Commission',\n", " 'européenne',\n", " 'rapporterait',\n", " 'chaque',\n", " 'année',\n", " 'jusqu’à',\n", " '57\\xa0milliards',\n", " 'd’euros\\xa0»'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«\\xa0La',\n", " 'taxe',\n", " 'proposée',\n", " 'par',\n", " 'la',\n", " 'Commission',\n", " 'européenne',\n", " 'rapporterait',\n", " 'chaque',\n", " 'année',\n", " 'jusqu’à',\n", " '57\\xa0milliards',\n", " 'd’euros\\xa0»'],\n", " 'sentence': ['«\\xa0La taxe proposée par la Commission européenne rapporterait chaque année jusqu’à 57\\xa0milliards d’euros\\xa0»']},\n", " {'entities': ['ministre des Sports', 'Amélie Oudéa-Castéra', 'Noël Le Graët'],\n", " 'document': [\"La ministre des Sports, Amélie Oudéa-Castéra, a été mise en examen pour diffamation envers l'ex-patron du foot français Noël Le Graët\"],\n", " 'token': ['La',\n", " 'ministre',\n", " 'des',\n", " 'Sports',\n", " ',',\n", " 'Amélie',\n", " 'Oudéa-Castéra',\n", " ',',\n", " 'a',\n", " 'été',\n", " 'mise',\n", " 'en',\n", " 'examen',\n", " 'pour',\n", " 'diffamation',\n", " 'envers',\n", " \"l'ex-patron\",\n", " 'du',\n", " 'foot',\n", " 
'français',\n", " 'Noël',\n", " 'Le',\n", " 'Graët'],\n", " 'ner': ['O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['La',\n", " 'ministre',\n", " 'des',\n", " 'Sports',\n", " ',',\n", " 'Amélie',\n", " 'Oudéa-Castéra',\n", " ',',\n", " 'a',\n", " 'été',\n", " 'mise',\n", " 'en',\n", " 'examen',\n", " 'pour',\n", " 'diffamation',\n", " 'envers',\n", " \"l'ex-patron\",\n", " 'du',\n", " 'foot',\n", " 'français',\n", " 'Noël',\n", " 'Le',\n", " 'Graët'],\n", " 'sentence': [\"La ministre des Sports, Amélie Oudéa-Castéra, a été mise en examen pour diffamation envers l'ex-patron du foot français Noël Le Graët\"]},\n", " {'entities': ['Guerre en Ukraine\\xa0', 'Macron'],\n", " 'document': ['Guerre en Ukraine\\xa0: la métamorphose d’Emmanuel Macron, colombe devenue faucon'],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'la',\n", " 'métamorphose',\n", " 'd’Emmanuel',\n", " 'Macron',\n", " ',',\n", " 'colombe',\n", " 'devenue',\n", " 'faucon'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'la',\n", " 'métamorphose',\n", " 'd’Emmanuel',\n", " 'Macron',\n", " ',',\n", " 'colombe',\n", " 'devenue',\n", " 'faucon'],\n", " 'sentence': ['Guerre en Ukraine\\xa0: la métamorphose d’Emmanuel Macron, colombe devenue faucon']},\n", " {'entities': ['en\\xa0Ukraine\\xa0',\n", " 'Poutine',\n", " 'Kiev',\n", " 'Belgorod et\\xa0Koursk'],\n", " 'document': ['En direct, guerre en\\xa0Ukraine\\xa0: Poutine accuse Kiev d’attaquer les régions russes de Belgorod et\\xa0Koursk pour «\\xa0saper\\xa0» la\\xa0présidentielle 
russe'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'Poutine',\n", " 'accuse',\n", " 'Kiev',\n", " 'd’attaquer',\n", " 'les',\n", " 'régions',\n", " 'russes',\n", " 'de',\n", " 'Belgorod',\n", " 'et\\xa0Koursk',\n", " 'pour',\n", " '«\\xa0saper\\xa0»',\n", " 'la\\xa0présidentielle',\n", " 'russe'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'Poutine',\n", " 'accuse',\n", " 'Kiev',\n", " 'd’attaquer',\n", " 'les',\n", " 'régions',\n", " 'russes',\n", " 'de',\n", " 'Belgorod',\n", " 'et\\xa0Koursk',\n", " 'pour',\n", " '«\\xa0saper\\xa0»',\n", " 'la\\xa0présidentielle',\n", " 'russe'],\n", " 'sentence': ['En direct, guerre en\\xa0Ukraine\\xa0: Poutine accuse Kiev d’attaquer les régions russes de Belgorod et\\xa0Koursk pour «\\xa0saper\\xa0» la\\xa0présidentielle russe']},\n", " {'entities': ['Médias', 'Vincent Bolloré', 'Cyril Hanouna', 'Pascal Praud'],\n", " 'document': [\"Médias : Vincent Bolloré, Cyril Hanouna, Pascal Praud... 
Ce qu'il faut retenir de leurs déclarations devant la commission d'enquête de l'Assemblée nationale\"],\n", " 'token': ['Médias',\n", " ':',\n", " 'Vincent',\n", " 'Bolloré',\n", " ',',\n", " 'Cyril',\n", " 'Hanouna',\n", " ',',\n", " 'Pascal',\n", " 'Praud',\n", " '...',\n", " 'Ce',\n", " \"qu'il\",\n", " 'faut',\n", " 'retenir',\n", " 'de',\n", " 'leurs',\n", " 'déclarations',\n", " 'devant',\n", " 'la',\n", " 'commission',\n", " \"d'enquête\",\n", " 'de',\n", " \"l'Assemblée\",\n", " 'nationale'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Médias',\n", " ':',\n", " 'Vincent',\n", " 'Bolloré',\n", " ',',\n", " 'Cyril',\n", " 'Hanouna',\n", " ',',\n", " 'Pascal',\n", " 'Praud',\n", " '...',\n", " 'Ce',\n", " \"qu'il\",\n", " 'faut',\n", " 'retenir',\n", " 'de',\n", " 'leurs',\n", " 'déclarations',\n", " 'devant',\n", " 'la',\n", " 'commission',\n", " \"d'enquête\",\n", " 'de',\n", " \"l'Assemblée\",\n", " 'nationale'],\n", " 'sentence': ['Médias : Vincent Bolloré, Cyril Hanouna, Pascal Praud...',\n", " \"Ce qu'il faut retenir de leurs déclarations devant la commission d'enquête de l'Assemblée nationale\"]},\n", " {'entities': ['Aubervilliers'],\n", " 'document': ['A Aubervilliers, une voiture de police a percuté et tué un jeune homme en scooter après une embardée, selon le parquet'],\n", " 'token': ['A',\n", " 'Aubervilliers',\n", " ',',\n", " 'une',\n", " 'voiture',\n", " 'de',\n", " 'police',\n", " 'a',\n", " 'percuté',\n", " 'et',\n", " 'tué',\n", " 'un',\n", " 'jeune',\n", " 'homme',\n", " 'en',\n", " 'scooter',\n", " 'après',\n", " 'une',\n", " 'embardée',\n", " ',',\n", " 'selon',\n", " 'le',\n", " 'parquet'],\n", " 'ner': ['O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 
'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['A',\n", " 'Aubervilliers',\n", " ',',\n", " 'une',\n", " 'voiture',\n", " 'de',\n", " 'police',\n", " 'a',\n", " 'percuté',\n", " 'et',\n", " 'tué',\n", " 'un',\n", " 'jeune',\n", " 'homme',\n", " 'en',\n", " 'scooter',\n", " 'après',\n", " 'une',\n", " 'embardée',\n", " ',',\n", " 'selon',\n", " 'le',\n", " 'parquet'],\n", " 'sentence': ['A Aubervilliers, une voiture de police a percuté et tué un jeune homme en scooter après une embardée, selon le parquet']},\n", " {'entities': ['Laurent Nuñez'],\n", " 'document': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " 'token': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'sentence': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars']},\n", " 
{'entities': ['Seine-Saint-Denis'],\n", " 'document': [\"Seine-Saint-Denis : collision entre deux jeunes à scooter et une voiture de police après un refus d'obtempérer, le conducteur est mort\"],\n", " 'token': ['Seine-Saint-Denis',\n", " ':',\n", " 'collision',\n", " 'entre',\n", " 'deux',\n", " 'jeunes',\n", " 'à',\n", " 'scooter',\n", " 'et',\n", " 'une',\n", " 'voiture',\n", " 'de',\n", " 'police',\n", " 'après',\n", " 'un',\n", " 'refus',\n", " \"d'obtempérer\",\n", " ',',\n", " 'le',\n", " 'conducteur',\n", " 'est',\n", " 'mort'],\n", " 'ner': ['I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Seine-Saint-Denis',\n", " ':',\n", " 'collision',\n", " 'entre',\n", " 'deux',\n", " 'jeunes',\n", " 'à',\n", " 'scooter',\n", " 'et',\n", " 'une',\n", " 'voiture',\n", " 'de',\n", " 'police',\n", " 'après',\n", " 'un',\n", " 'refus',\n", " \"d'obtempérer\",\n", " ',',\n", " 'le',\n", " 'conducteur',\n", " 'est',\n", " 'mort'],\n", " 'sentence': [\"Seine-Saint-Denis : collision entre deux jeunes à scooter et une voiture de police après un refus d'obtempérer, le conducteur est mort\"]},\n", " {'entities': ['Guerre en Ukraine', 'Vladimir Poutine', 'Macron', 'Russie'],\n", " 'document': ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie'],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'réagit',\n", " 'aux',\n", " 'propos',\n", " 'd’Emmanuel',\n", " 'Macron',\n", " 'concernant',\n", " 'la',\n", " 'Russie'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", 
" ':',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'réagit',\n", " 'aux',\n", " 'propos',\n", " 'd’Emmanuel',\n", " 'Macron',\n", " 'concernant',\n", " 'la',\n", " 'Russie'],\n", " 'sentence': ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie']},\n", " {'entities': ['UFC', 'Benoît Saint-Denis', 'Dustin Poirier'],\n", " 'document': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " 'token': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'ner': ['I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'sentence': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier']},\n", " {'entities': ['Marco Mouly'],\n", " 'document': ['Marco Mouly, «\\xa0roi de l’arnaque\\xa0» à la «\\xa0taxe carbone\\xa0», se rend au tribunal en vue de son incarcération'],\n", " 'token': ['Marco',\n", " 'Mouly',\n", " ',',\n", " '«\\xa0roi',\n", " 'de',\n", " 'l’arnaque\\xa0»',\n", " 'à',\n", " 'la',\n", " '«\\xa0taxe',\n", " 'carbone\\xa0»',\n", " ',',\n", " 'se',\n", " 'rend',\n", " 'au',\n", " 'tribunal',\n", " 'en',\n", " 'vue',\n", " 'de',\n", " 'son',\n", " 'incarcération'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 
'embeddings': ['Marco',\n", " 'Mouly',\n", " ',',\n", " '«\\xa0roi',\n", " 'de',\n", " 'l’arnaque\\xa0»',\n", " 'à',\n", " 'la',\n", " '«\\xa0taxe',\n", " 'carbone\\xa0»',\n", " ',',\n", " 'se',\n", " 'rend',\n", " 'au',\n", " 'tribunal',\n", " 'en',\n", " 'vue',\n", " 'de',\n", " 'son',\n", " 'incarcération'],\n", " 'sentence': ['Marco Mouly, «\\xa0roi de l’arnaque\\xa0» à la «\\xa0taxe carbone\\xa0», se rend au tribunal en vue de son incarcération']},\n", " {'entities': ['France', 'Cour des comptes'],\n", " 'document': ['La situation des finances publiques en France est «\\xa0préoccupante\\xa0», juge la Cour des comptes'],\n", " 'token': ['La',\n", " 'situation',\n", " 'des',\n", " 'finances',\n", " 'publiques',\n", " 'en',\n", " 'France',\n", " 'est',\n", " '«\\xa0préoccupante\\xa0»',\n", " ',',\n", " 'juge',\n", " 'la',\n", " 'Cour',\n", " 'des',\n", " 'comptes'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'I-LOC',\n", " 'I-LOC'],\n", " 'embeddings': ['La',\n", " 'situation',\n", " 'des',\n", " 'finances',\n", " 'publiques',\n", " 'en',\n", " 'France',\n", " 'est',\n", " '«\\xa0préoccupante\\xa0»',\n", " ',',\n", " 'juge',\n", " 'la',\n", " 'Cour',\n", " 'des',\n", " 'comptes'],\n", " 'sentence': ['La situation des finances publiques en France est «\\xa0préoccupante\\xa0», juge la Cour des comptes']},\n", " {'entities': ['Laurent Nuñez'],\n", " 'document': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " 'token': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 
'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'sentence': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars']},\n", " {'entities': ['Russie'],\n", " 'document': ['A la veille de la présidentielle, le sud de la Russie visé par des bombardements et des incursions armées'],\n", " 'token': ['A',\n", " 'la',\n", " 'veille',\n", " 'de',\n", " 'la',\n", " 'présidentielle',\n", " ',',\n", " 'le',\n", " 'sud',\n", " 'de',\n", " 'la',\n", " 'Russie',\n", " 'visé',\n", " 'par',\n", " 'des',\n", " 'bombardements',\n", " 'et',\n", " 'des',\n", " 'incursions',\n", " 'armées'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['A',\n", " 'la',\n", " 'veille',\n", " 'de',\n", " 'la',\n", " 'présidentielle',\n", " ',',\n", " 'le',\n", " 'sud',\n", " 'de',\n", " 'la',\n", " 'Russie',\n", " 'visé',\n", " 'par',\n", " 'des',\n", " 'bombardements',\n", " 'et',\n", " 'des',\n", " 'incursions',\n", " 'armées'],\n", " 'sentence': ['A la veille de la présidentielle, le sud de la Russie visé par des bombardements et des incursions armées']},\n", " {'entities': ['Guerre en Ukraine', 'Macron', 'France 2', 'TF1'],\n", " 'document': [\"Guerre en Ukraine : ce qu'il faut 
retenir de l'interview d'Emmanuel Macron sur France 2 et TF1\"],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'ce',\n", " \"qu'il\",\n", " 'faut',\n", " 'retenir',\n", " 'de',\n", " \"l'interview\",\n", " \"d'Emmanuel\",\n", " 'Macron',\n", " 'sur',\n", " 'France',\n", " '2',\n", " 'et',\n", " 'TF1'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'I-ORG'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'ce',\n", " \"qu'il\",\n", " 'faut',\n", " 'retenir',\n", " 'de',\n", " \"l'interview\",\n", " \"d'Emmanuel\",\n", " 'Macron',\n", " 'sur',\n", " 'France',\n", " '2',\n", " 'et',\n", " 'TF1'],\n", " 'sentence': [\"Guerre en Ukraine : ce qu'il faut retenir de l'interview d'Emmanuel Macron sur France 2 et TF1\"]},\n", " {'entities': ['Starship'],\n", " 'document': ['Pour son troisième vol, le Starship se\\xa0désintègre lors de\\xa0sa\\xa0rentrée dans l’atmosphère'],\n", " 'token': ['Pour',\n", " 'son',\n", " 'troisième',\n", " 'vol',\n", " ',',\n", " 'le',\n", " 'Starship',\n", " 'se\\xa0désintègre',\n", " 'lors',\n", " 'de\\xa0sa\\xa0rentrée',\n", " 'dans',\n", " 'l’atmosphère'],\n", " 'ner': ['O', 'O', 'O', 'O', 'O', 'O', 'I-MISC', 'O', 'O', 'O', 'O', 'O'],\n", " 'embeddings': ['Pour',\n", " 'son',\n", " 'troisième',\n", " 'vol',\n", " ',',\n", " 'le',\n", " 'Starship',\n", " 'se\\xa0désintègre',\n", " 'lors',\n", " 'de\\xa0sa\\xa0rentrée',\n", " 'dans',\n", " 'l’atmosphère'],\n", " 'sentence': ['Pour son troisième vol, le Starship se\\xa0désintègre lors de\\xa0sa\\xa0rentrée dans l’atmosphère']},\n", " {'entities': ['Intelligence', 'la France'],\n", " 'document': ['Intelligence artificielle\\xa0: un plan d’action pour placer la France «\\xa0à la pointe\\xa0»'],\n", " 'token': ['Intelligence',\n", " 'artificielle\\xa0',\n", " ':',\n", " 
'un',\n", " 'plan',\n", " 'd’action',\n", " 'pour',\n", " 'placer',\n", " 'la',\n", " 'France',\n", " '«\\xa0à',\n", " 'la',\n", " 'pointe\\xa0»'],\n", " 'ner': ['I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Intelligence',\n", " 'artificielle\\xa0',\n", " ':',\n", " 'un',\n", " 'plan',\n", " 'd’action',\n", " 'pour',\n", " 'placer',\n", " 'la',\n", " 'France',\n", " '«\\xa0à',\n", " 'la',\n", " 'pointe\\xa0»'],\n", " 'sentence': ['Intelligence artificielle\\xa0: un plan d’action pour placer la France «\\xa0à la pointe\\xa0»']},\n", " {'entities': ['en\\xa0Ukraine\\xa0',\n", " 'Poutine',\n", " 'Kiev',\n", " 'Belgorod et\\xa0Koursk'],\n", " 'document': ['En direct, guerre en\\xa0Ukraine\\xa0: Poutine accuse Kiev d’attaquer les régions russes de Belgorod et\\xa0Koursk pour «\\xa0saper\\xa0» la\\xa0présidentielle russe'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'Poutine',\n", " 'accuse',\n", " 'Kiev',\n", " 'd’attaquer',\n", " 'les',\n", " 'régions',\n", " 'russes',\n", " 'de',\n", " 'Belgorod',\n", " 'et\\xa0Koursk',\n", " 'pour',\n", " '«\\xa0saper\\xa0»',\n", " 'la\\xa0présidentielle',\n", " 'russe'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'Poutine',\n", " 'accuse',\n", " 'Kiev',\n", " 'd’attaquer',\n", " 'les',\n", " 'régions',\n", " 'russes',\n", " 'de',\n", " 'Belgorod',\n", " 'et\\xa0Koursk',\n", " 'pour',\n", " '«\\xa0saper\\xa0»',\n", " 'la\\xa0présidentielle',\n", " 'russe'],\n", " 'sentence': ['En direct, guerre en\\xa0Ukraine\\xa0: Poutine 
accuse Kiev d’attaquer les régions russes de Belgorod et\\xa0Koursk pour «\\xa0saper\\xa0» la\\xa0présidentielle russe']},\n", " {'entities': ['Laurent Nuñez'],\n", " 'document': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " 'token': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'sentence': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars']},\n", " {'entities': ['Stéphane Plaza'],\n", " 'document': [\"Stéphane Plaza sera jugé devant le tribunal correctionnel à la fin de l'été 2024\"],\n", " 'token': ['Stéphane',\n", " 'Plaza',\n", " 'sera',\n", " 'jugé',\n", " 'devant',\n", " 'le',\n", " 'tribunal',\n", " 'correctionnel',\n", " 'à',\n", " 'la',\n", " 'fin',\n", " 'de',\n", " \"l'été\",\n", " '2024'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Stéphane',\n", " 
'Plaza',\n", " 'sera',\n", " 'jugé',\n", " 'devant',\n", " 'le',\n", " 'tribunal',\n", " 'correctionnel',\n", " 'à',\n", " 'la',\n", " 'fin',\n", " 'de',\n", " \"l'été\",\n", " '2024'],\n", " 'sentence': [\"Stéphane Plaza sera jugé devant le tribunal correctionnel à la fin de l'été 2024\"]},\n", " {'entities': ['Teddy Riner', 'Kylian Mbappé', 'JO'],\n", " 'document': ['Plusieurs stars du sport français, dont Teddy Riner ou Kylian Mbappé, écartées par les critères de désignation des porte-drapeaux des JO'],\n", " 'token': ['Plusieurs',\n", " 'stars',\n", " 'du',\n", " 'sport',\n", " 'français',\n", " ',',\n", " 'dont',\n", " 'Teddy',\n", " 'Riner',\n", " 'ou',\n", " 'Kylian',\n", " 'Mbappé',\n", " ',',\n", " 'écartées',\n", " 'par',\n", " 'les',\n", " 'critères',\n", " 'de',\n", " 'désignation',\n", " 'des',\n", " 'porte-drapeaux',\n", " 'des',\n", " 'JO'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER'],\n", " 'embeddings': ['Plusieurs',\n", " 'stars',\n", " 'du',\n", " 'sport',\n", " 'français',\n", " ',',\n", " 'dont',\n", " 'Teddy',\n", " 'Riner',\n", " 'ou',\n", " 'Kylian',\n", " 'Mbappé',\n", " ',',\n", " 'écartées',\n", " 'par',\n", " 'les',\n", " 'critères',\n", " 'de',\n", " 'désignation',\n", " 'des',\n", " 'porte-drapeaux',\n", " 'des',\n", " 'JO'],\n", " 'sentence': ['Plusieurs stars du sport français, dont Teddy Riner ou Kylian Mbappé, écartées par les critères de désignation des porte-drapeaux des JO']},\n", " {'entities': ['accords de Munich', 'Emmanuel Macron', 'Ukraine'],\n", " 'document': ['L’argument des accords de Munich, utilisé par Emmanuel Macron dans le contexte de la guerre en Ukraine, a ses limites historiques'],\n", " 'token': ['L’argument',\n", " 'des',\n", " 'accords',\n", " 'de',\n", " 'Munich',\n", " 
',',\n", " 'utilisé',\n", " 'par',\n", " 'Emmanuel',\n", " 'Macron',\n", " 'dans',\n", " 'le',\n", " 'contexte',\n", " 'de',\n", " 'la',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine',\n", " ',',\n", " 'a',\n", " 'ses',\n", " 'limites',\n", " 'historiques'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['L’argument',\n", " 'des',\n", " 'accords',\n", " 'de',\n", " 'Munich',\n", " ',',\n", " 'utilisé',\n", " 'par',\n", " 'Emmanuel',\n", " 'Macron',\n", " 'dans',\n", " 'le',\n", " 'contexte',\n", " 'de',\n", " 'la',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine',\n", " ',',\n", " 'a',\n", " 'ses',\n", " 'limites',\n", " 'historiques'],\n", " 'sentence': ['L’argument des accords de Munich, utilisé par Emmanuel Macron dans le contexte de la guerre en Ukraine, a ses limites historiques']},\n", " {'entities': ['Vladimir Poutine'],\n", " 'document': [\"Russie : cinq choses à savoir sur l'élection présidentielle, que Vladimir Poutine est assuré de remporter\"],\n", " 'token': ['Russie',\n", " ':',\n", " 'cinq',\n", " 'choses',\n", " 'à',\n", " 'savoir',\n", " 'sur',\n", " \"l'élection\",\n", " 'présidentielle',\n", " ',',\n", " 'que',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'est',\n", " 'assuré',\n", " 'de',\n", " 'remporter'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Russie',\n", " ':',\n", " 'cinq',\n", " 'choses',\n", " 'à',\n", " 'savoir',\n", " 'sur',\n", " \"l'élection\",\n", " 'présidentielle',\n", " ',',\n", " 'que',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'est',\n", " 'assuré',\n", " 'de',\n", " 'remporter'],\n", " 'sentence': 
[\"Russie : cinq choses à savoir sur l'élection présidentielle, que Vladimir Poutine est assuré de remporter\"]},\n", " {'entities': ['Grêlé'],\n", " 'document': ['Le tueur en série \"le Grêlé\" avait participé à l\\'émission \"Tout le monde veut prendre sa place\" en 2019'],\n", " 'token': ['Le',\n", " 'tueur',\n", " 'en',\n", " 'série',\n", " '\"',\n", " 'le',\n", " 'Grêlé',\n", " '\"',\n", " 'avait',\n", " 'participé',\n", " 'à',\n", " \"l'émission\",\n", " '\"',\n", " 'Tout',\n", " 'le',\n", " 'monde',\n", " 'veut',\n", " 'prendre',\n", " 'sa',\n", " 'place',\n", " '\"',\n", " 'en',\n", " '2019'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Le',\n", " 'tueur',\n", " 'en',\n", " 'série',\n", " '\"',\n", " 'le',\n", " 'Grêlé',\n", " '\"',\n", " 'avait',\n", " 'participé',\n", " 'à',\n", " \"l'émission\",\n", " '\"',\n", " 'Tout',\n", " 'le',\n", " 'monde',\n", " 'veut',\n", " 'prendre',\n", " 'sa',\n", " 'place',\n", " '\"',\n", " 'en',\n", " '2019'],\n", " 'sentence': ['Le tueur en série \"le Grêlé\" avait participé à l\\'émission \"Tout le monde veut prendre sa place\" en 2019']},\n", " {'entities': ['Sciences Po'],\n", " 'document': ['Sciences Po s’embrase après une mobilisation propalestinienne, des insultes entendues et des versions contradictoires'],\n", " 'token': ['Sciences',\n", " 'Po',\n", " 's’embrase',\n", " 'après',\n", " 'une',\n", " 'mobilisation',\n", " 'propalestinienne',\n", " ',',\n", " 'des',\n", " 'insultes',\n", " 'entendues',\n", " 'et',\n", " 'des',\n", " 'versions',\n", " 'contradictoires'],\n", " 'ner': ['I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': 
['Sciences',\n", " 'Po',\n", " 's’embrase',\n", " 'après',\n", " 'une',\n", " 'mobilisation',\n", " 'propalestinienne',\n", " ',',\n", " 'des',\n", " 'insultes',\n", " 'entendues',\n", " 'et',\n", " 'des',\n", " 'versions',\n", " 'contradictoires'],\n", " 'sentence': ['Sciences Po s’embrase après une mobilisation propalestinienne, des insultes entendues et des versions contradictoires']},\n", " {'entities': ['«J’ai'],\n", " 'document': ['«J’ai besoin de ma voiture» : plusieurs véhicules désossés dans un parking parisien, les propriétaires atterrés'],\n", " 'token': ['«J’ai',\n", " 'besoin',\n", " 'de',\n", " 'ma',\n", " 'voiture»',\n", " ':',\n", " 'plusieurs',\n", " 'véhicules',\n", " 'désossés',\n", " 'dans',\n", " 'un',\n", " 'parking',\n", " 'parisien',\n", " ',',\n", " 'les',\n", " 'propriétaires',\n", " 'atterrés'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«J’ai',\n", " 'besoin',\n", " 'de',\n", " 'ma',\n", " 'voiture»',\n", " ':',\n", " 'plusieurs',\n", " 'véhicules',\n", " 'désossés',\n", " 'dans',\n", " 'un',\n", " 'parking',\n", " 'parisien',\n", " ',',\n", " 'les',\n", " 'propriétaires',\n", " 'atterrés'],\n", " 'sentence': ['«J’ai besoin de ma voiture» : plusieurs véhicules désossés dans un parking parisien, les propriétaires atterrés']},\n", " {'entities': ['Vincent Bolloré', 'Canal+'],\n", " 'document': ['Devant les députés, Vincent Bolloré, président du groupe Canal+, assume sa foi et récuse tout \"projet idéologique\"'],\n", " 'token': ['Devant',\n", " 'les',\n", " 'députés',\n", " ',',\n", " 'Vincent',\n", " 'Bolloré',\n", " ',',\n", " 'président',\n", " 'du',\n", " 'groupe',\n", " 'Canal+',\n", " ',',\n", " 'assume',\n", " 'sa',\n", " 'foi',\n", " 'et',\n", " 'récuse',\n", " 'tout',\n", " '\"',\n", " 'projet',\n", " 'idéologique',\n", " '\"'],\n", " 'ner': ['O',\n", " 
'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Devant',\n", " 'les',\n", " 'députés',\n", " ',',\n", " 'Vincent',\n", " 'Bolloré',\n", " ',',\n", " 'président',\n", " 'du',\n", " 'groupe',\n", " 'Canal+',\n", " ',',\n", " 'assume',\n", " 'sa',\n", " 'foi',\n", " 'et',\n", " 'récuse',\n", " 'tout',\n", " '\"',\n", " 'projet',\n", " 'idéologique',\n", " '\"'],\n", " 'sentence': ['Devant les députés, Vincent Bolloré, président du groupe Canal+, assume sa foi et récuse tout \"projet idéologique\"']},\n", " {'entities': ['Gérard Miller', 'Paris 8'],\n", " 'document': [\"Affaire Gérard Miller : une alerte sur son comportement avait été lancée auprès de la direction de l'université Paris 8\"],\n", " 'token': ['Affaire',\n", " 'Gérard',\n", " 'Miller',\n", " ':',\n", " 'une',\n", " 'alerte',\n", " 'sur',\n", " 'son',\n", " 'comportement',\n", " 'avait',\n", " 'été',\n", " 'lancée',\n", " 'auprès',\n", " 'de',\n", " 'la',\n", " 'direction',\n", " 'de',\n", " \"l'université\",\n", " 'Paris',\n", " '8'],\n", " 'ner': ['O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'I-LOC'],\n", " 'embeddings': ['Affaire',\n", " 'Gérard',\n", " 'Miller',\n", " ':',\n", " 'une',\n", " 'alerte',\n", " 'sur',\n", " 'son',\n", " 'comportement',\n", " 'avait',\n", " 'été',\n", " 'lancée',\n", " 'auprès',\n", " 'de',\n", " 'la',\n", " 'direction',\n", " 'de',\n", " \"l'université\",\n", " 'Paris',\n", " '8'],\n", " 'sentence': [\"Affaire Gérard Miller : une alerte sur son comportement avait été lancée auprès de la direction de l'université Paris 8\"]},\n", " {'entities': ['Nom identique'],\n", " 'document': [\"Nom 
identique, amis en commun... assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " 'token': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'sentence': ['Nom identique, amis en commun.',\n", " '.',\n", " '.',\n", " \"assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"]},\n", " {'entities': ['Teddy Riner', 'Kylian Mbappé', 'JO'],\n", " 'document': ['Plusieurs stars du sport français, dont Teddy Riner ou Kylian Mbappé, écartées par les critères de désignation des porte-drapeaux des JO'],\n", " 'token': ['Plusieurs',\n", " 'stars',\n", " 'du',\n", " 'sport',\n", " 'français',\n", " ',',\n", " 'dont',\n", " 'Teddy',\n", " 'Riner',\n", " 'ou',\n", " 'Kylian',\n", " 'Mbappé',\n", " ',',\n", " 'écartées',\n", " 'par',\n", " 'les',\n", " 'critères',\n", " 'de',\n", " 'désignation',\n", " 'des',\n", " 'porte-drapeaux',\n", " 'des',\n", " 'JO'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", 
" 'I-PER'],\n", " 'embeddings': ['Plusieurs',\n", " 'stars',\n", " 'du',\n", " 'sport',\n", " 'français',\n", " ',',\n", " 'dont',\n", " 'Teddy',\n", " 'Riner',\n", " 'ou',\n", " 'Kylian',\n", " 'Mbappé',\n", " ',',\n", " 'écartées',\n", " 'par',\n", " 'les',\n", " 'critères',\n", " 'de',\n", " 'désignation',\n", " 'des',\n", " 'porte-drapeaux',\n", " 'des',\n", " 'JO'],\n", " 'sentence': ['Plusieurs stars du sport français, dont Teddy Riner ou Kylian Mbappé, écartées par les critères de désignation des porte-drapeaux des JO']},\n", " {'entities': ['Madeleine Chapsal'],\n", " 'document': ['Madeleine Chapsal, journaliste et écrivaine, est morte à l’âge de 98\\xa0ans'],\n", " 'token': ['Madeleine',\n", " 'Chapsal',\n", " ',',\n", " 'journaliste',\n", " 'et',\n", " 'écrivaine',\n", " ',',\n", " 'est',\n", " 'morte',\n", " 'à',\n", " 'l’âge',\n", " 'de',\n", " '98\\xa0ans'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Madeleine',\n", " 'Chapsal',\n", " ',',\n", " 'journaliste',\n", " 'et',\n", " 'écrivaine',\n", " ',',\n", " 'est',\n", " 'morte',\n", " 'à',\n", " 'l’âge',\n", " 'de',\n", " '98\\xa0ans'],\n", " 'sentence': ['Madeleine Chapsal, journaliste et écrivaine, est morte à l’âge de 98\\xa0ans']},\n", " {'entities': ['UFC', 'Benoît Saint-Denis', 'Dustin Poirier'],\n", " 'document': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " 'token': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'ner': ['I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': 
['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'sentence': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier']},\n", " {'entities': ['UFC', 'Benoît Saint-Denis', 'Dustin Poirier'],\n", " 'document': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " 'token': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'ner': ['I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'sentence': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier']},\n", " {'entities': ['en\\xa0Ukraine\\xa0'],\n", " 'document': ['En direct, guerre en\\xa0Ukraine\\xa0: les sénateurs votent en faveur de l’accord bilatéral à une large majorité'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'les',\n", " 'sénateurs',\n", " 'votent',\n", " 'en',\n", " 'faveur',\n", " 'de',\n", " 'l’accord',\n", " 'bilatéral',\n", " 'à',\n", " 'une',\n", " 'large',\n", " 'majorité'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 
'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'les',\n", " 'sénateurs',\n", " 'votent',\n", " 'en',\n", " 'faveur',\n", " 'de',\n", " 'l’accord',\n", " 'bilatéral',\n", " 'à',\n", " 'une',\n", " 'large',\n", " 'majorité'],\n", " 'sentence': ['En direct, guerre en\\xa0Ukraine\\xa0: les sénateurs votent en faveur de l’accord bilatéral à une large majorité']},\n", " {'entities': ['ministre des Sports', 'Amélie Oudéa-Castéra', 'Noël Le Graët'],\n", " 'document': [\"La ministre des Sports, Amélie Oudéa-Castéra, a été mise en examen pour diffamation envers l'ex-patron du foot français Noël Le Graët\"],\n", " 'token': ['La',\n", " 'ministre',\n", " 'des',\n", " 'Sports',\n", " ',',\n", " 'Amélie',\n", " 'Oudéa-Castéra',\n", " ',',\n", " 'a',\n", " 'été',\n", " 'mise',\n", " 'en',\n", " 'examen',\n", " 'pour',\n", " 'diffamation',\n", " 'envers',\n", " \"l'ex-patron\",\n", " 'du',\n", " 'foot',\n", " 'français',\n", " 'Noël',\n", " 'Le',\n", " 'Graët'],\n", " 'ner': ['O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['La',\n", " 'ministre',\n", " 'des',\n", " 'Sports',\n", " ',',\n", " 'Amélie',\n", " 'Oudéa-Castéra',\n", " ',',\n", " 'a',\n", " 'été',\n", " 'mise',\n", " 'en',\n", " 'examen',\n", " 'pour',\n", " 'diffamation',\n", " 'envers',\n", " \"l'ex-patron\",\n", " 'du',\n", " 'foot',\n", " 'français',\n", " 'Noël',\n", " 'Le',\n", " 'Graët'],\n", " 'sentence': [\"La ministre des Sports, Amélie Oudéa-Castéra, a été mise en examen pour diffamation envers l'ex-patron du foot français Noël Le Graët\"]},\n", " {'entities': ['accords de Munich', 'Emmanuel Macron', 'Ukraine'],\n", " 'document': ['L’argument des accords de Munich, utilisé par Emmanuel Macron dans 
le contexte de la guerre en Ukraine, a ses limites historiques'],\n", " 'token': ['L’argument',\n", " 'des',\n", " 'accords',\n", " 'de',\n", " 'Munich',\n", " ',',\n", " 'utilisé',\n", " 'par',\n", " 'Emmanuel',\n", " 'Macron',\n", " 'dans',\n", " 'le',\n", " 'contexte',\n", " 'de',\n", " 'la',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine',\n", " ',',\n", " 'a',\n", " 'ses',\n", " 'limites',\n", " 'historiques'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['L’argument',\n", " 'des',\n", " 'accords',\n", " 'de',\n", " 'Munich',\n", " ',',\n", " 'utilisé',\n", " 'par',\n", " 'Emmanuel',\n", " 'Macron',\n", " 'dans',\n", " 'le',\n", " 'contexte',\n", " 'de',\n", " 'la',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine',\n", " ',',\n", " 'a',\n", " 'ses',\n", " 'limites',\n", " 'historiques'],\n", " 'sentence': ['L’argument des accords de Munich, utilisé par Emmanuel Macron dans le contexte de la guerre en Ukraine, a ses limites historiques']},\n", " {'entities': ['Nom identique'],\n", " 'document': [\"Nom identique, amis en commun... 
assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " 'token': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'sentence': ['Nom identique, amis en commun.',\n", " '.',\n", " '.',\n", " \"assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"]},\n", " {'entities': ['Kate Middleton'],\n", " 'document': ['Kate Middleton présente ses excuses après la publication d’une photo retouchée de la princesse et de ses enfants'],\n", " 'token': ['Kate',\n", " 'Middleton',\n", " 'présente',\n", " 'ses',\n", " 'excuses',\n", " 'après',\n", " 'la',\n", " 'publication',\n", " 'd’une',\n", " 'photo',\n", " 'retouchée',\n", " 'de',\n", " 'la',\n", " 'princesse',\n", " 'et',\n", " 'de',\n", " 'ses',\n", " 'enfants'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Kate',\n", " 'Middleton',\n", " 'présente',\n", " 'ses',\n", " 'excuses',\n", " 'après',\n", " 'la',\n", " 'publication',\n", " 'd’une',\n", " 'photo',\n", " 'retouchée',\n", " 
'de',\n", " 'la',\n", " 'princesse',\n", " 'et',\n", " 'de',\n", " 'ses',\n", " 'enfants'],\n", " 'sentence': ['Kate Middleton présente ses excuses après la publication d’une photo retouchée de la princesse et de ses enfants']},\n", " {'entities': ['UFC', 'Benoît Saint-Denis', 'Dustin Poirier'],\n", " 'document': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier'],\n", " 'token': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'ner': ['I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['UFC',\n", " ':',\n", " 'voici',\n", " 'la',\n", " 'somme',\n", " 'touchée',\n", " 'par',\n", " 'Benoît',\n", " 'Saint-Denis',\n", " 'après',\n", " 'sa',\n", " 'défaite',\n", " 'contre',\n", " 'Dustin',\n", " 'Poirier'],\n", " 'sentence': ['UFC : voici la somme touchée par Benoît Saint-Denis après sa défaite contre Dustin Poirier']},\n", " {'entities': ['Sandrine Rousseau', 'Sarah El Haïry'],\n", " 'document': ['\"Nous ne pouvons plus fermer les yeux sur ce qu\\'il se passe\" : passe d\\'armes entre la députée Sandrine Rousseau et la ministre Sarah El Haïry sur l\\'Aide sociale à l\\'enfance'],\n", " 'token': ['\"',\n", " 'Nous',\n", " 'ne',\n", " 'pouvons',\n", " 'plus',\n", " 'fermer',\n", " 'les',\n", " 'yeux',\n", " 'sur',\n", " 'ce',\n", " \"qu'il\",\n", " 'se',\n", " 'passe',\n", " '\"',\n", " ':',\n", " 'passe',\n", " \"d'armes\",\n", " 'entre',\n", " 'la',\n", " 'députée',\n", " 'Sandrine',\n", " 'Rousseau',\n", " 'et',\n", " 'la',\n", " 'ministre',\n", " 'Sarah',\n", " 'El',\n", " 'Haïry',\n", " 'sur',\n", " \"l'Aide\",\n", " 'sociale',\n", " 'à',\n", " \"l'enfance\"],\n", " 'ner': ['O',\n", " 'O',\n", " 
'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['\"',\n", " 'Nous',\n", " 'ne',\n", " 'pouvons',\n", " 'plus',\n", " 'fermer',\n", " 'les',\n", " 'yeux',\n", " 'sur',\n", " 'ce',\n", " \"qu'il\",\n", " 'se',\n", " 'passe',\n", " '\"',\n", " ':',\n", " 'passe',\n", " \"d'armes\",\n", " 'entre',\n", " 'la',\n", " 'députée',\n", " 'Sandrine',\n", " 'Rousseau',\n", " 'et',\n", " 'la',\n", " 'ministre',\n", " 'Sarah',\n", " 'El',\n", " 'Haïry',\n", " 'sur',\n", " \"l'Aide\",\n", " 'sociale',\n", " 'à',\n", " \"l'enfance\"],\n", " 'sentence': ['\"Nous ne pouvons plus fermer les yeux sur ce qu\\'il se passe\" : passe d\\'armes entre la députée Sandrine Rousseau et la ministre Sarah El Haïry sur l\\'Aide sociale à l\\'enfance']},\n", " {'entities': ['Russie', 'Vladimir Poutine'],\n", " 'document': ['Présidentielle en Russie : Vladimir Poutine demande aux électeurs de se rendre aux urnes pour \"décider de l\\'avenir de la patrie\"'],\n", " 'token': ['Présidentielle',\n", " 'en',\n", " 'Russie',\n", " ':',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'demande',\n", " 'aux',\n", " 'électeurs',\n", " 'de',\n", " 'se',\n", " 'rendre',\n", " 'aux',\n", " 'urnes',\n", " 'pour',\n", " '\"',\n", " 'décider',\n", " 'de',\n", " \"l'avenir\",\n", " 'de',\n", " 'la',\n", " 'patrie',\n", " '\"'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Présidentielle',\n", " 'en',\n", " 'Russie',\n", " ':',\n", " 'Vladimir',\n", " 
'Poutine',\n", " 'demande',\n", " 'aux',\n", " 'électeurs',\n", " 'de',\n", " 'se',\n", " 'rendre',\n", " 'aux',\n", " 'urnes',\n", " 'pour',\n", " '\"',\n", " 'décider',\n", " 'de',\n", " \"l'avenir\",\n", " 'de',\n", " 'la',\n", " 'patrie',\n", " '\"'],\n", " 'sentence': ['Présidentielle en Russie : Vladimir Poutine demande aux électeurs de se rendre aux urnes pour \"décider de l\\'avenir de la patrie\"']},\n", " {'entities': ['Emmanuel Macron\\xa0', 'Europe', 'Russie', 'Ukraine'],\n", " 'document': ['Emmanuel Macron\\xa0: «\\xa0Nous n’aurons plus de sécurité\\xa0» en Europe si la Russie «\\xa0venait à gagner\\xa0» en Ukraine'],\n", " 'token': ['Emmanuel',\n", " 'Macron\\xa0',\n", " ':',\n", " '«\\xa0Nous',\n", " 'n’aurons',\n", " 'plus',\n", " 'de',\n", " 'sécurité\\xa0»',\n", " 'en',\n", " 'Europe',\n", " 'si',\n", " 'la',\n", " 'Russie',\n", " '«\\xa0venait',\n", " 'à',\n", " 'gagner\\xa0»',\n", " 'en',\n", " 'Ukraine'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['Emmanuel',\n", " 'Macron\\xa0',\n", " ':',\n", " '«\\xa0Nous',\n", " 'n’aurons',\n", " 'plus',\n", " 'de',\n", " 'sécurité\\xa0»',\n", " 'en',\n", " 'Europe',\n", " 'si',\n", " 'la',\n", " 'Russie',\n", " '«\\xa0venait',\n", " 'à',\n", " 'gagner\\xa0»',\n", " 'en',\n", " 'Ukraine'],\n", " 'sentence': ['Emmanuel Macron\\xa0: «\\xa0Nous n’aurons plus de sécurité\\xa0» en Europe si la Russie «\\xa0venait à gagner\\xa0» en Ukraine']},\n", " {'entities': ['Etats-Unis\\xa0', 'Joe Biden', 'Donald Trump'],\n", " 'document': ['Etats-Unis\\xa0: le duel entre Joe Biden et Donald Trump pour l’élection présidentielle est désormais officiel'],\n", " 'token': ['Etats-Unis\\xa0',\n", " ':',\n", " 'le',\n", " 'duel',\n", " 'entre',\n", " 'Joe',\n", " 'Biden',\n", " 'et',\n", " 'Donald',\n", " 'Trump',\n", " 
'pour',\n", " 'l’élection',\n", " 'présidentielle',\n", " 'est',\n", " 'désormais',\n", " 'officiel'],\n", " 'ner': ['I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Etats-Unis\\xa0',\n", " ':',\n", " 'le',\n", " 'duel',\n", " 'entre',\n", " 'Joe',\n", " 'Biden',\n", " 'et',\n", " 'Donald',\n", " 'Trump',\n", " 'pour',\n", " 'l’élection',\n", " 'présidentielle',\n", " 'est',\n", " 'désormais',\n", " 'officiel'],\n", " 'sentence': ['Etats-Unis\\xa0: le duel entre Joe Biden et Donald Trump pour l’élection présidentielle est désormais officiel']},\n", " {'entities': ['Médias', 'CMA CGM', 'BFMTV', 'RMC'],\n", " 'document': [\"Médias : le groupe CMA CGM rachète BFMTV et RMC pour 1,55 milliard d'euros\"],\n", " 'token': ['Médias',\n", " ':',\n", " 'le',\n", " 'groupe',\n", " 'CMA',\n", " 'CGM',\n", " 'rachète',\n", " 'BFMTV',\n", " 'et',\n", " 'RMC',\n", " 'pour',\n", " '1,55',\n", " 'milliard',\n", " \"d'euros\"],\n", " 'ner': ['I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Médias',\n", " ':',\n", " 'le',\n", " 'groupe',\n", " 'CMA',\n", " 'CGM',\n", " 'rachète',\n", " 'BFMTV',\n", " 'et',\n", " 'RMC',\n", " 'pour',\n", " '1,55',\n", " 'milliard',\n", " \"d'euros\"],\n", " 'sentence': [\"Médias : le groupe CMA CGM rachète BFMTV et RMC pour 1,55 milliard d'euros\"]},\n", " {'entities': ['Sandrine Rousseau', 'Sarah El Haïry'],\n", " 'document': ['\"Nous ne pouvons plus fermer les yeux sur ce qu\\'il se passe\" : passe d\\'armes entre la députée Sandrine Rousseau et la ministre Sarah El Haïry sur l\\'Aide sociale à l\\'enfance'],\n", " 'token': ['\"',\n", " 'Nous',\n", " 'ne',\n", " 'pouvons',\n", " 'plus',\n", " 'fermer',\n", " 'les',\n", " 'yeux',\n", " 'sur',\n", " 
'ce',\n", " \"qu'il\",\n", " 'se',\n", " 'passe',\n", " '\"',\n", " ':',\n", " 'passe',\n", " \"d'armes\",\n", " 'entre',\n", " 'la',\n", " 'députée',\n", " 'Sandrine',\n", " 'Rousseau',\n", " 'et',\n", " 'la',\n", " 'ministre',\n", " 'Sarah',\n", " 'El',\n", " 'Haïry',\n", " 'sur',\n", " \"l'Aide\",\n", " 'sociale',\n", " 'à',\n", " \"l'enfance\"],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['\"',\n", " 'Nous',\n", " 'ne',\n", " 'pouvons',\n", " 'plus',\n", " 'fermer',\n", " 'les',\n", " 'yeux',\n", " 'sur',\n", " 'ce',\n", " \"qu'il\",\n", " 'se',\n", " 'passe',\n", " '\"',\n", " ':',\n", " 'passe',\n", " \"d'armes\",\n", " 'entre',\n", " 'la',\n", " 'députée',\n", " 'Sandrine',\n", " 'Rousseau',\n", " 'et',\n", " 'la',\n", " 'ministre',\n", " 'Sarah',\n", " 'El',\n", " 'Haïry',\n", " 'sur',\n", " \"l'Aide\",\n", " 'sociale',\n", " 'à',\n", " \"l'enfance\"],\n", " 'sentence': ['\"Nous ne pouvons plus fermer les yeux sur ce qu\\'il se passe\" : passe d\\'armes entre la députée Sandrine Rousseau et la ministre Sarah El Haïry sur l\\'Aide sociale à l\\'enfance']},\n", " {'entities': ['Thierry Mariani', 'Kremlin', 'RN'],\n", " 'document': ['Elections européennes 2024\\xa0: la réhabilitation de Thierry Mariani, un proche du Kremlin devenu voix du RN'],\n", " 'token': ['Elections',\n", " 'européennes',\n", " '2024\\xa0',\n", " ':',\n", " 'la',\n", " 'réhabilitation',\n", " 'de',\n", " 'Thierry',\n", " 'Mariani',\n", " ',',\n", " 'un',\n", " 'proche',\n", " 'du',\n", " 'Kremlin',\n", " 'devenu',\n", " 'voix',\n", " 'du',\n", " 'RN'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['Elections',\n", " 'européennes',\n", " '2024\\xa0',\n", " ':',\n", " 'la',\n", " 'réhabilitation',\n", " 'de',\n", " 'Thierry',\n", " 'Mariani',\n", " ',',\n", " 'un',\n", " 'proche',\n", " 'du',\n", " 'Kremlin',\n", " 'devenu',\n", " 'voix',\n", " 'du',\n", " 'RN'],\n", " 'sentence': ['Elections européennes 2024\\xa0: la réhabilitation de Thierry Mariani, un proche du Kremlin devenu voix du RN']},\n", " {'entities': ['Deepfakes'],\n", " 'document': ['\"Deepfakes\" pornographiques : pourquoi la lutte contre les images générées par intelligence artificielle s\\'annonce difficile'],\n", " 'token': ['\"',\n", " 'Deepfakes',\n", " '\"',\n", " 'pornographiques',\n", " ':',\n", " 'pourquoi',\n", " 'la',\n", " 'lutte',\n", " 'contre',\n", " 'les',\n", " 'images',\n", " 'générées',\n", " 'par',\n", " 'intelligence',\n", " 'artificielle',\n", " \"s'annonce\",\n", " 'difficile'],\n", " 'ner': ['O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['\"',\n", " 'Deepfakes',\n", " '\"',\n", " 'pornographiques',\n", " ':',\n", " 'pourquoi',\n", " 'la',\n", " 'lutte',\n", " 'contre',\n", " 'les',\n", " 'images',\n", " 'générées',\n", " 'par',\n", " 'intelligence',\n", " 'artificielle',\n", " \"s'annonce\",\n", " 'difficile'],\n", " 'sentence': ['\"Deepfakes\" pornographiques : pourquoi la lutte contre les images générées par intelligence artificielle s\\'annonce difficile']},\n", " {'entities': ['Deepfakes'],\n", " 'document': ['\"Deepfakes\" pornographiques : pourquoi la lutte contre les images générées par intelligence artificielle s\\'annonce difficile'],\n", " 'token': ['\"',\n", " 'Deepfakes',\n", " '\"',\n", " 'pornographiques',\n", " ':',\n", " 
'pourquoi',\n", " 'la',\n", " 'lutte',\n", " 'contre',\n", " 'les',\n", " 'images',\n", " 'générées',\n", " 'par',\n", " 'intelligence',\n", " 'artificielle',\n", " \"s'annonce\",\n", " 'difficile'],\n", " 'ner': ['O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['\"',\n", " 'Deepfakes',\n", " '\"',\n", " 'pornographiques',\n", " ':',\n", " 'pourquoi',\n", " 'la',\n", " 'lutte',\n", " 'contre',\n", " 'les',\n", " 'images',\n", " 'générées',\n", " 'par',\n", " 'intelligence',\n", " 'artificielle',\n", " \"s'annonce\",\n", " 'difficile'],\n", " 'sentence': ['\"Deepfakes\" pornographiques : pourquoi la lutte contre les images générées par intelligence artificielle s\\'annonce difficile']},\n", " {'entities': ['Kate Middleton', '«La presse', 'Bertrand Deckers'],\n", " 'document': ['Polémique sur une photo trafiquée de Kate Middleton : «La presse est en train de se retourner contre elle (…), cela pourrait devenir dangereux», estime le spécialiste royauté Bertrand Deckers'],\n", " 'token': ['Polémique',\n", " 'sur',\n", " 'une',\n", " 'photo',\n", " 'trafiquée',\n", " 'de',\n", " 'Kate',\n", " 'Middleton',\n", " ':',\n", " '«La',\n", " 'presse',\n", " 'est',\n", " 'en',\n", " 'train',\n", " 'de',\n", " 'se',\n", " 'retourner',\n", " 'contre',\n", " 'elle',\n", " '(',\n", " '…',\n", " '),',\n", " 'cela',\n", " 'pourrait',\n", " 'devenir',\n", " 'dangereux»',\n", " ',',\n", " 'estime',\n", " 'le',\n", " 'spécialiste',\n", " 'royauté',\n", " 'Bertrand',\n", " 'Deckers'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['Polémique',\n", " 'sur',\n", " 'une',\n", " 'photo',\n", " 'trafiquée',\n", " 'de',\n", " 'Kate',\n", " 'Middleton',\n", " ':',\n", " '«La',\n", " 'presse',\n", " 'est',\n", " 'en',\n", " 'train',\n", " 'de',\n", " 'se',\n", " 'retourner',\n", " 'contre',\n", " 'elle',\n", " '(',\n", " '…',\n", " '),',\n", " 'cela',\n", " 'pourrait',\n", " 'devenir',\n", " 'dangereux»',\n", " ',',\n", " 'estime',\n", " 'le',\n", " 'spécialiste',\n", " 'royauté',\n", " 'Bertrand',\n", " 'Deckers'],\n", " 'sentence': ['Polémique sur une photo trafiquée de Kate Middleton : «La presse est en train de se retourner contre elle (…), cela pourrait devenir dangereux», estime le spécialiste royauté Bertrand Deckers']},\n", " {'entities': ['«\\xa0La', 'Commission européenne'],\n", " 'document': ['«\\xa0La taxe sur les transactions financières proposée par la Commission européenne rapporterait chaque année jusqu’à 57 milliards d’euros\\xa0»'],\n", " 'token': ['«\\xa0La',\n", " 'taxe',\n", " 'sur',\n", " 'les',\n", " 'transactions',\n", " 'financières',\n", " 'proposée',\n", " 'par',\n", " 'la',\n", " 'Commission',\n", " 'européenne',\n", " 'rapporterait',\n", " 'chaque',\n", " 'année',\n", " 'jusqu’à',\n", " '57',\n", " 'milliards',\n", " 'd’euros\\xa0»'],\n", " 'ner': ['I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«\\xa0La',\n", " 'taxe',\n", " 'sur',\n", " 'les',\n", " 'transactions',\n", " 'financières',\n", " 'proposée',\n", " 'par',\n", " 'la',\n", " 'Commission',\n", " 'européenne',\n", " 'rapporterait',\n", " 'chaque',\n", " 'année',\n", " 'jusqu’à',\n", " '57',\n", " 'milliards',\n", " 'd’euros\\xa0»'],\n", " 'sentence': ['«\\xa0La taxe sur les transactions financières proposée par la Commission européenne rapporterait chaque année jusqu’à 
57 milliards d’euros\\xa0»']},\n", " {'entities': ['CNews'],\n", " 'document': ['Punaises de\\xa0lit et\\xa0immigration\\xa0: l’Arcom met en\\xa0garde la\\xa0chaîne CNews'],\n", " 'token': ['Punaises',\n", " 'de\\xa0lit',\n", " 'et\\xa0immigration\\xa0',\n", " ':',\n", " 'l’Arcom',\n", " 'met',\n", " 'en\\xa0garde',\n", " 'la\\xa0chaîne',\n", " 'CNews'],\n", " 'ner': ['O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'I-PER'],\n", " 'embeddings': ['Punaises',\n", " 'de\\xa0lit',\n", " 'et\\xa0immigration\\xa0',\n", " ':',\n", " 'l’Arcom',\n", " 'met',\n", " 'en\\xa0garde',\n", " 'la\\xa0chaîne',\n", " 'CNews'],\n", " 'sentence': ['Punaises de\\xa0lit et\\xa0immigration\\xa0: l’Arcom met en\\xa0garde la\\xa0chaîne CNews']},\n", " {'entities': ['Laurent Nuñez'],\n", " 'document': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " 'token': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'sentence': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la 
manifestation du 8 mars']},\n", " {'entities': ['Nom identique'],\n", " 'document': [\"Nom identique, amis en commun... assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " 'token': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'sentence': ['Nom identique, amis en commun.',\n", " '.',\n", " '.',\n", " \"assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"]},\n", " {'entities': ['Koursk', 'Belgorod', 'Moscou'],\n", " 'document': ['En direct, guerre en\\xa0Ukraine\\xa0: des unités russes pro ukrainiennes affirment mener des attaques à Koursk et Belgorod, malgré les démentis de Moscou'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'des',\n", " 'unités',\n", " 'russes',\n", " 'pro',\n", " 'ukrainiennes',\n", " 'affirment',\n", " 'mener',\n", " 'des',\n", " 'attaques',\n", " 'à',\n", " 'Koursk',\n", " 'et',\n", " 'Belgorod',\n", " ',',\n", " 'malgré',\n", " 'les',\n", " 'démentis',\n", " 'de',\n", " 'Moscou'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'des',\n", " 'unités',\n", " 'russes',\n", " 'pro',\n", " 'ukrainiennes',\n", " 'affirment',\n", " 'mener',\n", " 'des',\n", " 'attaques',\n", " 'à',\n", " 'Koursk',\n", " 'et',\n", " 'Belgorod',\n", " ',',\n", " 'malgré',\n", " 'les',\n", " 'démentis',\n", " 'de',\n", " 'Moscou'],\n", " 'sentence': ['En direct, guerre en\\xa0Ukraine\\xa0: des unités russes pro ukrainiennes affirment mener des attaques à Koursk et Belgorod, malgré les démentis de Moscou']},\n", " {'entities': ['Nom identique'],\n", " 'document': [\"Nom identique, amis en commun... assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " 'token': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'sentence': ['Nom identique, amis en commun.',\n", " '.',\n", " '.',\n", " \"assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"]},\n", " {'entities': ['Guerre en 
Ukraine', 'Vladimir Poutine', 'Macron', 'Russie'],\n", " 'document': ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie'],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'réagit',\n", " 'aux',\n", " 'propos',\n", " 'd’Emmanuel',\n", " 'Macron',\n", " 'concernant',\n", " 'la',\n", " 'Russie'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'réagit',\n", " 'aux',\n", " 'propos',\n", " 'd’Emmanuel',\n", " 'Macron',\n", " 'concernant',\n", " 'la',\n", " 'Russie'],\n", " 'sentence': ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie']},\n", " {'entities': ['Coco', 'Gaza'],\n", " 'document': ['La dessinatrice Coco menacée de mort pour un dessin sur la famine à Gaza'],\n", " 'token': ['La',\n", " 'dessinatrice',\n", " 'Coco',\n", " 'menacée',\n", " 'de',\n", " 'mort',\n", " 'pour',\n", " 'un',\n", " 'dessin',\n", " 'sur',\n", " 'la',\n", " 'famine',\n", " 'à',\n", " 'Gaza'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC'],\n", " 'embeddings': ['La',\n", " 'dessinatrice',\n", " 'Coco',\n", " 'menacée',\n", " 'de',\n", " 'mort',\n", " 'pour',\n", " 'un',\n", " 'dessin',\n", " 'sur',\n", " 'la',\n", " 'famine',\n", " 'à',\n", " 'Gaza'],\n", " 'sentence': ['La dessinatrice Coco menacée de mort pour un dessin sur la famine à Gaza']},\n", " {'entities': ['Impôts'],\n", " 'document': ['Impôts : voici les dates limites pour remplir sa déclaration de revenus en ligne en 2024'],\n", " 'token': ['Impôts',\n", " ':',\n", " 'voici',\n", " 'les',\n", " 
'dates',\n", " 'limites',\n", " 'pour',\n", " 'remplir',\n", " 'sa',\n", " 'déclaration',\n", " 'de',\n", " 'revenus',\n", " 'en',\n", " 'ligne',\n", " 'en',\n", " '2024'],\n", " 'ner': ['I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Impôts',\n", " ':',\n", " 'voici',\n", " 'les',\n", " 'dates',\n", " 'limites',\n", " 'pour',\n", " 'remplir',\n", " 'sa',\n", " 'déclaration',\n", " 'de',\n", " 'revenus',\n", " 'en',\n", " 'ligne',\n", " 'en',\n", " '2024'],\n", " 'sentence': ['Impôts : voici les dates limites pour remplir sa déclaration de revenus en ligne en 2024']},\n", " {'entities': ['en\\xa0Ukraine\\xa0', 'Kstovo', 'Reuters'],\n", " 'document': ['En direct, guerre en\\xa0Ukraine\\xa0: la production réduite de moitié à la raffinerie de Kstovo, endommagée par des drones, selon Reuters'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'la',\n", " 'production',\n", " 'réduite',\n", " 'de',\n", " 'moitié',\n", " 'à',\n", " 'la',\n", " 'raffinerie',\n", " 'de',\n", " 'Kstovo',\n", " ',',\n", " 'endommagée',\n", " 'par',\n", " 'des',\n", " 'drones',\n", " ',',\n", " 'selon',\n", " 'Reuters'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'la',\n", " 'production',\n", " 'réduite',\n", " 'de',\n", " 'moitié',\n", " 'à',\n", " 'la',\n", " 'raffinerie',\n", " 'de',\n", " 'Kstovo',\n", " ',',\n", " 'endommagée',\n", " 'par',\n", " 'des',\n", " 'drones',\n", " ',',\n", " 'selon',\n", " 'Reuters'],\n", " 
'sentence': ['En direct, guerre en\\xa0Ukraine\\xa0: la production réduite de moitié à la raffinerie de Kstovo, endommagée par des drones, selon Reuters']},\n", " {'entities': ['Etats-Unis\\xa0', 'Joe Biden', 'Donald Trump'],\n", " 'document': ['Etats-Unis\\xa0: le duel entre Joe Biden et Donald Trump pour l’élection présidentielle est désormais officiel'],\n", " 'token': ['Etats-Unis\\xa0',\n", " ':',\n", " 'le',\n", " 'duel',\n", " 'entre',\n", " 'Joe',\n", " 'Biden',\n", " 'et',\n", " 'Donald',\n", " 'Trump',\n", " 'pour',\n", " 'l’élection',\n", " 'présidentielle',\n", " 'est',\n", " 'désormais',\n", " 'officiel'],\n", " 'ner': ['I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Etats-Unis\\xa0',\n", " ':',\n", " 'le',\n", " 'duel',\n", " 'entre',\n", " 'Joe',\n", " 'Biden',\n", " 'et',\n", " 'Donald',\n", " 'Trump',\n", " 'pour',\n", " 'l’élection',\n", " 'présidentielle',\n", " 'est',\n", " 'désormais',\n", " 'officiel'],\n", " 'sentence': ['Etats-Unis\\xa0: le duel entre Joe Biden et Donald Trump pour l’élection présidentielle est désormais officiel']},\n", " {'entities': ['Laurent Nuñez'],\n", " 'document': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " 'token': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'sentence': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars']},\n", " {'entities': [],\n", " 'document': ['«\\xa0Anatomie d’une chute\\xa0» poursuit son incroyable parcours avec l’Oscar du meilleur scénario original'],\n", " 'token': ['«\\xa0Anatomie',\n", " 'd’une',\n", " 'chute\\xa0»',\n", " 'poursuit',\n", " 'son',\n", " 'incroyable',\n", " 'parcours',\n", " 'avec',\n", " 'l’Oscar',\n", " 'du',\n", " 'meilleur',\n", " 'scénario',\n", " 'original'],\n", " 'ner': ['O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O'],\n", " 'embeddings': ['«\\xa0Anatomie',\n", " 'd’une',\n", " 'chute\\xa0»',\n", " 'poursuit',\n", " 'son',\n", " 'incroyable',\n", " 'parcours',\n", " 'avec',\n", " 'l’Oscar',\n", " 'du',\n", " 'meilleur',\n", " 'scénario',\n", " 'original'],\n", " 'sentence': ['«\\xa0Anatomie d’une chute\\xa0» poursuit son incroyable parcours avec l’Oscar du meilleur scénario original']},\n", " {'entities': ['Nom identique'],\n", " 'document': [\"Nom identique, amis en commun... 
assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " 'token': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'sentence': ['Nom identique, amis en commun.',\n", " '.',\n", " '.',\n", " \"assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"]},\n", " {'entities': ['Guerre en Ukraine', 'Vladimir Poutine', 'Kiev'],\n", " 'document': ['Guerre en Ukraine : Vladimir Poutine accuse Kiev d\\'attaquer les régions russes pour \"tenter d\\'empêcher\" la présidentielle'],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'accuse',\n", " 'Kiev',\n", " \"d'attaquer\",\n", " 'les',\n", " 'régions',\n", " 'russes',\n", " 'pour',\n", " '\"',\n", " 'tenter',\n", " \"d'empêcher\",\n", " '\"',\n", " 'la',\n", " 'présidentielle'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'Vladimir',\n", " 
'Poutine',\n", " 'accuse',\n", " 'Kiev',\n", " \"d'attaquer\",\n", " 'les',\n", " 'régions',\n", " 'russes',\n", " 'pour',\n", " '\"',\n", " 'tenter',\n", " \"d'empêcher\",\n", " '\"',\n", " 'la',\n", " 'présidentielle'],\n", " 'sentence': ['Guerre en Ukraine : Vladimir Poutine accuse Kiev d\\'attaquer les régions russes pour \"tenter d\\'empêcher\" la présidentielle']},\n", " {'entities': ['Guerre en Ukraine', 'Vladimir Poutine', 'Macron', 'Russie'],\n", " 'document': ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie'],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'réagit',\n", " 'aux',\n", " 'propos',\n", " 'd’Emmanuel',\n", " 'Macron',\n", " 'concernant',\n", " 'la',\n", " 'Russie'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'Vladimir',\n", " 'Poutine',\n", " 'réagit',\n", " 'aux',\n", " 'propos',\n", " 'd’Emmanuel',\n", " 'Macron',\n", " 'concernant',\n", " 'la',\n", " 'Russie'],\n", " 'sentence': ['Guerre en Ukraine : Vladimir Poutine réagit aux propos d’Emmanuel Macron concernant la Russie']},\n", " {'entities': ['Ukraine', 'Emmanuel Macron', 'Jean-Yves Le Drian'],\n", " 'document': ['Ukraine : en assumant une \"ambiguïté stratégique\", Emmanuel Macron \"a permis une prise de conscience de l\\'ampleur du sujet\", selon Jean-Yves Le Drian'],\n", " 'token': ['Ukraine',\n", " ':',\n", " 'en',\n", " 'assumant',\n", " 'une',\n", " '\"',\n", " 'ambiguïté',\n", " 'stratégique',\n", " '\",',\n", " 'Emmanuel',\n", " 'Macron',\n", " '\"',\n", " 'a',\n", " 'permis',\n", " 'une',\n", " 'prise',\n", " 'de',\n", " 'conscience',\n", " 'de',\n", " \"l'ampleur\",\n", " 'du',\n", " 'sujet',\n", " '\",',\n", " 'selon',\n", " 'Jean-Yves',\n", " 
'Le',\n", " 'Drian'],\n", " 'ner': ['I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['Ukraine',\n", " ':',\n", " 'en',\n", " 'assumant',\n", " 'une',\n", " '\"',\n", " 'ambiguïté',\n", " 'stratégique',\n", " '\",',\n", " 'Emmanuel',\n", " 'Macron',\n", " '\"',\n", " 'a',\n", " 'permis',\n", " 'une',\n", " 'prise',\n", " 'de',\n", " 'conscience',\n", " 'de',\n", " \"l'ampleur\",\n", " 'du',\n", " 'sujet',\n", " '\",',\n", " 'selon',\n", " 'Jean-Yves',\n", " 'Le',\n", " 'Drian'],\n", " 'sentence': ['Ukraine : en assumant une \"ambiguïté stratégique\", Emmanuel Macron \"a permis une prise de conscience de l\\'ampleur du sujet\", selon Jean-Yves Le Drian']},\n", " {'entities': ['Aubervilliers'],\n", " 'document': ['A Aubervilliers, une voiture de police a percuté et tué un jeune homme en scooter après une embardée, selon le parquet'],\n", " 'token': ['A',\n", " 'Aubervilliers',\n", " ',',\n", " 'une',\n", " 'voiture',\n", " 'de',\n", " 'police',\n", " 'a',\n", " 'percuté',\n", " 'et',\n", " 'tué',\n", " 'un',\n", " 'jeune',\n", " 'homme',\n", " 'en',\n", " 'scooter',\n", " 'après',\n", " 'une',\n", " 'embardée',\n", " ',',\n", " 'selon',\n", " 'le',\n", " 'parquet'],\n", " 'ner': ['O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['A',\n", " 'Aubervilliers',\n", " ',',\n", " 'une',\n", " 'voiture',\n", " 'de',\n", " 'police',\n", " 'a',\n", " 'percuté',\n", " 'et',\n", " 'tué',\n", " 'un',\n", " 'jeune',\n", " 'homme',\n", " 'en',\n", " 'scooter',\n", " 'après',\n", " 
'une',\n", " 'embardée',\n", " ',',\n", " 'selon',\n", " 'le',\n", " 'parquet'],\n", " 'sentence': ['A Aubervilliers, une voiture de police a percuté et tué un jeune homme en scooter après une embardée, selon le parquet']},\n", " {'entities': ['Nom identique'],\n", " 'document': [\"Nom identique, amis en commun... assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " 'token': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'sentence': ['Nom identique, amis en commun.',\n", " '.',\n", " '.',\n", " \"assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"]},\n", " {'entities': ['l’Ukraine',\n", " 'Gabriel Attal',\n", " 'RN d’être «\\xa0pro-Poutine\\xa0»'],\n", " 'document': ['A l’Assemblée, l’accord avec l’Ukraine approuvé, Gabriel Attal accuse le RN d’être «\\xa0pro-Poutine\\xa0»'],\n", " 'token': ['A',\n", " 'l’Assemblée',\n", " ',',\n", " 'l’accord',\n", " 'avec',\n", " 'l’Ukraine',\n", " 'approuvé',\n", " ',',\n", " 'Gabriel',\n", " 'Attal',\n", " 'accuse',\n", " 'le',\n", " 'RN',\n", " 'd’être',\n", " '«\\xa0pro-Poutine\\xa0»'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'I-MISC',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'I-LOC',\n", " 'I-LOC'],\n", " 'embeddings': ['A',\n", " 'l’Assemblée',\n", " ',',\n", " 'l’accord',\n", " 'avec',\n", " 'l’Ukraine',\n", " 'approuvé',\n", " ',',\n", " 'Gabriel',\n", " 'Attal',\n", " 'accuse',\n", " 'le',\n", " 'RN',\n", " 'd’être',\n", " '«\\xa0pro-Poutine\\xa0»'],\n", " 'sentence': ['A l’Assemblée, l’accord avec l’Ukraine approuvé, Gabriel Attal accuse le RN d’être «\\xa0pro-Poutine\\xa0»']},\n", " {'entities': ['Ukrainiens'],\n", " 'document': ['\"Ils vont chez les gens avec des hommes armés\"\\xa0: dans les zones occupées, les Ukrainiens sont \"invités\" à voter pour la présidentielle russe'],\n", " 'token': ['\"',\n", " 'Ils',\n", " 'vont',\n", " 'chez',\n", " 'les',\n", " 'gens',\n", " 'avec',\n", " 'des',\n", " 'hommes',\n", " 'armés\"\\xa0',\n", " ':',\n", " 'dans',\n", " 'les',\n", " 'zones',\n", " 'occupées',\n", " ',',\n", " 'les',\n", " 'Ukrainiens',\n", " 'sont',\n", " '\"',\n", " 'invités',\n", " '\"',\n", " 'à',\n", " 'voter',\n", " 'pour',\n", " 'la',\n", " 'présidentielle',\n", " 'russe'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['\"',\n", " 'Ils',\n", " 'vont',\n", " 'chez',\n", " 'les',\n", " 'gens',\n", " 'avec',\n", " 'des',\n", " 'hommes',\n", " 'armés\"\\xa0',\n", " ':',\n", " 'dans',\n", " 'les',\n", " 'zones',\n", " 'occupées',\n", " ',',\n", " 'les',\n", " 'Ukrainiens',\n", " 'sont',\n", " '\"',\n", " 'invités',\n", " '\"',\n", " 'à',\n", " 'voter',\n", " 'pour',\n", " 'la',\n", " 'présidentielle',\n", " 'russe'],\n", " 'sentence': ['\"Ils vont chez les gens avec des hommes armés\"\\xa0: dans les zones occupées, les 
Ukrainiens sont \"invités\" à voter pour la présidentielle russe']},\n", " {'entities': ['Guerre en Ukraine\\xa0'],\n", " 'document': ['Guerre en Ukraine\\xa0: des milices russes ont attaqué leur propre pays'],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'des',\n", " 'milices',\n", " 'russes',\n", " 'ont',\n", " 'attaqué',\n", " 'leur',\n", " 'propre',\n", " 'pays'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'des',\n", " 'milices',\n", " 'russes',\n", " 'ont',\n", " 'attaqué',\n", " 'leur',\n", " 'propre',\n", " 'pays'],\n", " 'sentence': ['Guerre en Ukraine\\xa0: des milices russes ont attaqué leur propre pays']},\n", " {'entities': ['Lisa', 'Louka', 'Mathéïs'],\n", " 'document': ['Lisa, Louka, Mathéïs, la triste chronique de trois infanticides\\xa0annoncés'],\n", " 'token': ['Lisa',\n", " ',',\n", " 'Louka',\n", " ',',\n", " 'Mathéïs',\n", " ',',\n", " 'la',\n", " 'triste',\n", " 'chronique',\n", " 'de',\n", " 'trois',\n", " 'infanticides\\xa0annoncés'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Lisa',\n", " ',',\n", " 'Louka',\n", " ',',\n", " 'Mathéïs',\n", " ',',\n", " 'la',\n", " 'triste',\n", " 'chronique',\n", " 'de',\n", " 'trois',\n", " 'infanticides\\xa0annoncés'],\n", " 'sentence': ['Lisa, Louka, Mathéïs, la triste chronique de trois infanticides\\xa0annoncés']},\n", " {'entities': ['CNews', \"L'Heure des Pros\"],\n", " 'document': ['L\\'Arcom met en garde la chaîne CNews après une séquence de \"L\\'Heure des Pros\" faisant le lien entre immigration et punaises de lit'],\n", " 'token': [\"L'Arcom\",\n", " 'met',\n", " 'en',\n", " 'garde',\n", " 'la',\n", " 'chaîne',\n", " 'CNews',\n", " 
'après',\n", " 'une',\n", " 'séquence',\n", " 'de',\n", " '\"',\n", " \"L'Heure\",\n", " 'des',\n", " 'Pros',\n", " '\"',\n", " 'faisant',\n", " 'le',\n", " 'lien',\n", " 'entre',\n", " 'immigration',\n", " 'et',\n", " 'punaises',\n", " 'de',\n", " 'lit'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': [\"L'Arcom\",\n", " 'met',\n", " 'en',\n", " 'garde',\n", " 'la',\n", " 'chaîne',\n", " 'CNews',\n", " 'après',\n", " 'une',\n", " 'séquence',\n", " 'de',\n", " '\"',\n", " \"L'Heure\",\n", " 'des',\n", " 'Pros',\n", " '\"',\n", " 'faisant',\n", " 'le',\n", " 'lien',\n", " 'entre',\n", " 'immigration',\n", " 'et',\n", " 'punaises',\n", " 'de',\n", " 'lit'],\n", " 'sentence': ['L\\'Arcom met en garde la chaîne CNews après une séquence de \"L\\'Heure des Pros\" faisant le lien entre immigration et punaises de lit']},\n", " {'entities': ['en\\xa0Ukraine\\xa0', 'TASS'],\n", " 'document': ['En direct, guerre en\\xa0Ukraine\\xa0: au moins dix régions russes attaquées, selon l’agence TASS'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'au',\n", " 'moins',\n", " 'dix',\n", " 'régions',\n", " 'russes',\n", " 'attaquées',\n", " ',',\n", " 'selon',\n", " 'l’agence',\n", " 'TASS'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en\\xa0Ukraine\\xa0',\n", " ':',\n", " 'au',\n", " 'moins',\n", " 'dix',\n", " 'régions',\n", " 'russes',\n", " 'attaquées',\n", " ',',\n", " 'selon',\n", " 'l’agence',\n", " 'TASS'],\n", " 
'sentence': ['En direct, guerre en\\xa0Ukraine\\xa0: au moins dix régions russes attaquées, selon l’agence TASS']},\n", " {'entities': ['Nom identique'],\n", " 'document': [\"Nom identique, amis en commun... assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " 'token': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'sentence': ['Nom identique, amis en commun.',\n", " '.',\n", " '.',\n", " \"assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"]},\n", " {'entities': ['Tesla'],\n", " 'document': ['\"On continue d\\'aller dans le mur, même si on y va en Tesla\" : dans l\\'Allier, un projet de mine de lithium révèle le fossé entre deux visions de l\\'écologie'],\n", " 'token': ['\"',\n", " 'On',\n", " 'continue',\n", " \"d'aller\",\n", " 'dans',\n", " 'le',\n", " 'mur',\n", " ',',\n", " 'même',\n", " 'si',\n", " 'on',\n", " 'y',\n", " 'va',\n", " 'en',\n", " 'Tesla',\n", " '\"',\n", " ':',\n", " 'dans',\n", " \"l'Allier\",\n", " ',',\n", " 'un',\n", " 'projet',\n", " 'de',\n", " 'mine',\n", " 'de',\n", " 'lithium',\n", " 'révèle',\n", " 'le',\n", " 'fossé',\n", " 'entre',\n", " 'deux',\n", " 
'visions',\n", " 'de',\n", " \"l'écologie\"],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['\"',\n", " 'On',\n", " 'continue',\n", " \"d'aller\",\n", " 'dans',\n", " 'le',\n", " 'mur',\n", " ',',\n", " 'même',\n", " 'si',\n", " 'on',\n", " 'y',\n", " 'va',\n", " 'en',\n", " 'Tesla',\n", " '\"',\n", " ':',\n", " 'dans',\n", " \"l'Allier\",\n", " ',',\n", " 'un',\n", " 'projet',\n", " 'de',\n", " 'mine',\n", " 'de',\n", " 'lithium',\n", " 'révèle',\n", " 'le',\n", " 'fossé',\n", " 'entre',\n", " 'deux',\n", " 'visions',\n", " 'de',\n", " \"l'écologie\"],\n", " 'sentence': ['\"On continue d\\'aller dans le mur, même si on y va en Tesla\" : dans l\\'Allier, un projet de mine de lithium révèle le fossé entre deux visions de l\\'écologie']},\n", " {'entities': ['Comité Palestine'],\n", " 'document': ['Étudiante juive refusée dans un amphithéâtre : \"Cette personne nous a filmés de manière ciblée\", explique un membre du Comité Palestine'],\n", " 'token': ['Étudiante',\n", " 'juive',\n", " 'refusée',\n", " 'dans',\n", " 'un',\n", " 'amphithéâtre',\n", " ':',\n", " '\"',\n", " 'Cette',\n", " 'personne',\n", " 'nous',\n", " 'a',\n", " 'filmés',\n", " 'de',\n", " 'manière',\n", " 'ciblée',\n", " '\",',\n", " 'explique',\n", " 'un',\n", " 'membre',\n", " 'du',\n", " 'Comité',\n", " 'Palestine'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG'],\n", " 'embeddings': ['Étudiante',\n", " 'juive',\n", " 
'refusée',\n", " 'dans',\n", " 'un',\n", " 'amphithéâtre',\n", " ':',\n", " '\"',\n", " 'Cette',\n", " 'personne',\n", " 'nous',\n", " 'a',\n", " 'filmés',\n", " 'de',\n", " 'manière',\n", " 'ciblée',\n", " '\",',\n", " 'explique',\n", " 'un',\n", " 'membre',\n", " 'du',\n", " 'Comité',\n", " 'Palestine'],\n", " 'sentence': ['Étudiante juive refusée dans un amphithéâtre : \"Cette personne nous a filmés de manière ciblée\", explique un membre du Comité Palestine']},\n", " {'entities': ['Anonymous Sudan'],\n", " 'document': [\"Cyberattaques contre l'État : qui est Anonymous Sudan qui revendique un piratage massif d'une «intensité inédite» ?\"],\n", " 'token': ['Cyberattaques',\n", " 'contre',\n", " \"l'État\",\n", " ':',\n", " 'qui',\n", " 'est',\n", " 'Anonymous',\n", " 'Sudan',\n", " 'qui',\n", " 'revendique',\n", " 'un',\n", " 'piratage',\n", " 'massif',\n", " \"d'une\",\n", " '«intensité',\n", " 'inédite»',\n", " '?'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Cyberattaques',\n", " 'contre',\n", " \"l'État\",\n", " ':',\n", " 'qui',\n", " 'est',\n", " 'Anonymous',\n", " 'Sudan',\n", " 'qui',\n", " 'revendique',\n", " 'un',\n", " 'piratage',\n", " 'massif',\n", " \"d'une\",\n", " '«intensité',\n", " 'inédite»',\n", " '?'],\n", " 'sentence': [\"Cyberattaques contre l'État : qui est Anonymous Sudan qui revendique un piratage massif d'une «intensité inédite» ?\"]},\n", " {'entities': ['Sciences Po'],\n", " 'document': ['Sciences Po s’embrase après une mobilisation propalestinienne, des insultes entendues et des versions contradictoires'],\n", " 'token': ['Sciences',\n", " 'Po',\n", " 's’embrase',\n", " 'après',\n", " 'une',\n", " 'mobilisation',\n", " 'propalestinienne',\n", " ',',\n", " 'des',\n", " 'insultes',\n", " 'entendues',\n", " 'et',\n", " 'des',\n", " 'versions',\n", " 
'contradictoires'],\n", " 'ner': ['I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Sciences',\n", " 'Po',\n", " 's’embrase',\n", " 'après',\n", " 'une',\n", " 'mobilisation',\n", " 'propalestinienne',\n", " ',',\n", " 'des',\n", " 'insultes',\n", " 'entendues',\n", " 'et',\n", " 'des',\n", " 'versions',\n", " 'contradictoires'],\n", " 'sentence': ['Sciences Po s’embrase après une mobilisation propalestinienne, des insultes entendues et des versions contradictoires']},\n", " {'entities': ['Guerre en Ukraine\\xa0', 'Jordan Bardella', 'Marine Le Pen'],\n", " 'document': ['Guerre en Ukraine\\xa0: entre Jordan Bardella et Marine Le Pen, une différence de forme'],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'entre',\n", " 'Jordan',\n", " 'Bardella',\n", " 'et',\n", " 'Marine',\n", " 'Le',\n", " 'Pen',\n", " ',',\n", " 'une',\n", " 'différence',\n", " 'de',\n", " 'forme'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'entre',\n", " 'Jordan',\n", " 'Bardella',\n", " 'et',\n", " 'Marine',\n", " 'Le',\n", " 'Pen',\n", " ',',\n", " 'une',\n", " 'différence',\n", " 'de',\n", " 'forme'],\n", " 'sentence': ['Guerre en Ukraine\\xa0: entre Jordan Bardella et Marine Le Pen, une différence de forme']},\n", " {'entities': ['Nom identique'],\n", " 'document': [\"Nom identique, amis en commun... 
assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " 'token': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'sentence': ['Nom identique, amis en commun.',\n", " '.',\n", " '.',\n", " \"assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"]},\n", " {'entities': ['Laurent Nuñez'],\n", " 'document': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " 'token': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 
'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'sentence': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars']},\n", " {'entities': ['Alerte', 'Gard'],\n", " 'document': ['Alerte aux crues, vagues-submersion\\xa0: deux enfants de 4\\xa0et 13\\xa0ans portés disparus dans le Gard, cinq\\xa0corps retrouvés'],\n", " 'token': ['Alerte',\n", " 'aux',\n", " 'crues',\n", " ',',\n", " 'vagues-submersion\\xa0',\n", " ':',\n", " 'deux',\n", " 'enfants',\n", " 'de',\n", " '4\\xa0et',\n", " '13\\xa0ans',\n", " 'portés',\n", " 'disparus',\n", " 'dans',\n", " 'le',\n", " 'Gard',\n", " ',',\n", " 'cinq\\xa0corps',\n", " 'retrouvés'],\n", " 'ner': ['I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Alerte',\n", " 'aux',\n", " 'crues',\n", " ',',\n", " 'vagues-submersion\\xa0',\n", " ':',\n", " 'deux',\n", " 'enfants',\n", " 'de',\n", " '4\\xa0et',\n", " '13\\xa0ans',\n", " 'portés',\n", " 'disparus',\n", " 'dans',\n", " 'le',\n", " 'Gard',\n", " ',',\n", " 'cinq\\xa0corps',\n", " 'retrouvés'],\n", " 'sentence': ['Alerte aux crues, vagues-submersion\\xa0: deux enfants de 4\\xa0et 13\\xa0ans portés disparus dans le Gard, cinq\\xa0corps retrouvés']},\n", " {'entities': ['Kate Middleton', '«La presse', 'Bertrand Deckers'],\n", " 'document': ['Polémique sur une photo trafiquée de Kate Middleton : «La presse est en train de se retourner contre elle (…), cela pourrait devenir dangereux», estime le spécialiste royauté Bertrand Deckers'],\n", " 'token': ['Polémique',\n", " 'sur',\n", " 'une',\n", " 
'photo',\n", " 'trafiquée',\n", " 'de',\n", " 'Kate',\n", " 'Middleton',\n", " ':',\n", " '«La',\n", " 'presse',\n", " 'est',\n", " 'en',\n", " 'train',\n", " 'de',\n", " 'se',\n", " 'retourner',\n", " 'contre',\n", " 'elle',\n", " '(',\n", " '…',\n", " '),',\n", " 'cela',\n", " 'pourrait',\n", " 'devenir',\n", " 'dangereux»',\n", " ',',\n", " 'estime',\n", " 'le',\n", " 'spécialiste',\n", " 'royauté',\n", " 'Bertrand',\n", " 'Deckers'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['Polémique',\n", " 'sur',\n", " 'une',\n", " 'photo',\n", " 'trafiquée',\n", " 'de',\n", " 'Kate',\n", " 'Middleton',\n", " ':',\n", " '«La',\n", " 'presse',\n", " 'est',\n", " 'en',\n", " 'train',\n", " 'de',\n", " 'se',\n", " 'retourner',\n", " 'contre',\n", " 'elle',\n", " '(',\n", " '…',\n", " '),',\n", " 'cela',\n", " 'pourrait',\n", " 'devenir',\n", " 'dangereux»',\n", " ',',\n", " 'estime',\n", " 'le',\n", " 'spécialiste',\n", " 'royauté',\n", " 'Bertrand',\n", " 'Deckers'],\n", " 'sentence': ['Polémique sur une photo trafiquée de Kate Middleton : «La presse est en train de se retourner contre elle (…), cela pourrait devenir dangereux», estime le spécialiste royauté Bertrand Deckers']},\n", " {'entities': ['Marco Mouly'],\n", " 'document': ['Escroquerie à la taxe carbone\\xa0: \"Je me rends\", déclare Marco Mouly au tribunal en vue de son incarcération'],\n", " 'token': ['Escroquerie',\n", " 'à',\n", " 'la',\n", " 'taxe',\n", " 'carbone\\xa0',\n", " ':',\n", " '\"',\n", " 'Je',\n", " 'me',\n", " 'rends',\n", " '\",',\n", " 'déclare',\n", " 'Marco',\n", " 'Mouly',\n", " 'au',\n", " 'tribunal',\n", " 'en',\n", " 
'vue',\n", " 'de',\n", " 'son',\n", " 'incarcération'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Escroquerie',\n", " 'à',\n", " 'la',\n", " 'taxe',\n", " 'carbone\\xa0',\n", " ':',\n", " '\"',\n", " 'Je',\n", " 'me',\n", " 'rends',\n", " '\",',\n", " 'déclare',\n", " 'Marco',\n", " 'Mouly',\n", " 'au',\n", " 'tribunal',\n", " 'en',\n", " 'vue',\n", " 'de',\n", " 'son',\n", " 'incarcération'],\n", " 'sentence': ['Escroquerie à la taxe carbone\\xa0: \"Je me rends\", déclare Marco Mouly au tribunal en vue de son incarcération']},\n", " {'entities': ['accords de Munich', 'Emmanuel Macron', 'Ukraine'],\n", " 'document': ['L’argument des accords de Munich, utilisé par Emmanuel Macron dans le contexte de la guerre en Ukraine, a ses limites historiques'],\n", " 'token': ['L’argument',\n", " 'des',\n", " 'accords',\n", " 'de',\n", " 'Munich',\n", " ',',\n", " 'utilisé',\n", " 'par',\n", " 'Emmanuel',\n", " 'Macron',\n", " 'dans',\n", " 'le',\n", " 'contexte',\n", " 'de',\n", " 'la',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine',\n", " ',',\n", " 'a',\n", " 'ses',\n", " 'limites',\n", " 'historiques'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['L’argument',\n", " 'des',\n", " 'accords',\n", " 'de',\n", " 'Munich',\n", " ',',\n", " 'utilisé',\n", " 'par',\n", " 'Emmanuel',\n", " 'Macron',\n", " 'dans',\n", " 'le',\n", " 'contexte',\n", " 'de',\n", " 'la',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine',\n", " ',',\n", " 'a',\n", " 'ses',\n", " 'limites',\n", " 'historiques'],\n", " 'sentence': 
['L’argument des accords de Munich, utilisé par Emmanuel Macron dans le contexte de la guerre en Ukraine, a ses limites historiques']},\n", " {'entities': ['Guerre en Ukraine', 'Russie', 'Emmanuel Macron'],\n", " 'document': ['Guerre en Ukraine : \"Nous ne devons pas laisser la Russie gagner\", martèle Emmanuel Macron'],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " '\"',\n", " 'Nous',\n", " 'ne',\n", " 'devons',\n", " 'pas',\n", " 'laisser',\n", " 'la',\n", " 'Russie',\n", " 'gagner',\n", " '\",',\n", " 'martèle',\n", " 'Emmanuel',\n", " 'Macron'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " '\"',\n", " 'Nous',\n", " 'ne',\n", " 'devons',\n", " 'pas',\n", " 'laisser',\n", " 'la',\n", " 'Russie',\n", " 'gagner',\n", " '\",',\n", " 'martèle',\n", " 'Emmanuel',\n", " 'Macron'],\n", " 'sentence': ['Guerre en Ukraine : \"Nous ne devons pas laisser la Russie gagner\", martèle Emmanuel Macron']},\n", " {'entities': ['Nom identique'],\n", " 'document': [\"Nom identique, amis en commun... 
assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " 'token': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'sentence': ['Nom identique, amis en commun.',\n", " '.',\n", " '.',\n", " \"assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"]},\n", " {'entities': ['Ukraine\\xa0', 'F-16'],\n", " 'document': ['En direct, guerre en Ukraine\\xa0: les premiers avions de chasse F-16 ukrainiens devraient être en service en juillet'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'les',\n", " 'premiers',\n", " 'avions',\n", " 'de',\n", " 'chasse',\n", " 'F-16',\n", " 'ukrainiens',\n", " 'devraient',\n", " 'être',\n", " 'en',\n", " 'service',\n", " 'en',\n", " 'juillet'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'les',\n", " 
'premiers',\n", " 'avions',\n", " 'de',\n", " 'chasse',\n", " 'F-16',\n", " 'ukrainiens',\n", " 'devraient',\n", " 'être',\n", " 'en',\n", " 'service',\n", " 'en',\n", " 'juillet'],\n", " 'sentence': ['En direct, guerre en Ukraine\\xa0: les premiers avions de chasse F-16 ukrainiens devraient être en service en juillet']},\n", " {'entities': ['CNews', \"L'Heure des Pros\"],\n", " 'document': ['L\\'Arcom met en garde la chaîne CNews après une séquence de \"L\\'Heure des Pros\" faisant le lien entre immigration et punaises de lit'],\n", " 'token': [\"L'Arcom\",\n", " 'met',\n", " 'en',\n", " 'garde',\n", " 'la',\n", " 'chaîne',\n", " 'CNews',\n", " 'après',\n", " 'une',\n", " 'séquence',\n", " 'de',\n", " '\"',\n", " \"L'Heure\",\n", " 'des',\n", " 'Pros',\n", " '\"',\n", " 'faisant',\n", " 'le',\n", " 'lien',\n", " 'entre',\n", " 'immigration',\n", " 'et',\n", " 'punaises',\n", " 'de',\n", " 'lit'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': [\"L'Arcom\",\n", " 'met',\n", " 'en',\n", " 'garde',\n", " 'la',\n", " 'chaîne',\n", " 'CNews',\n", " 'après',\n", " 'une',\n", " 'séquence',\n", " 'de',\n", " '\"',\n", " \"L'Heure\",\n", " 'des',\n", " 'Pros',\n", " '\"',\n", " 'faisant',\n", " 'le',\n", " 'lien',\n", " 'entre',\n", " 'immigration',\n", " 'et',\n", " 'punaises',\n", " 'de',\n", " 'lit'],\n", " 'sentence': ['L\\'Arcom met en garde la chaîne CNews après une séquence de \"L\\'Heure des Pros\" faisant le lien entre immigration et punaises de lit']},\n", " {'entities': ['D’anciens', 'Notre-Dame de Bétharram'],\n", " 'document': ['D’anciens élèves de l’établissement catholique privé Notre-Dame de Bétharram dénoncent un «\\xa0régime de la terreur\\xa0»'],\n", " 'token': ['D’anciens',\n", " 
'élèves',\n", " 'de',\n", " 'l’établissement',\n", " 'catholique',\n", " 'privé',\n", " 'Notre-Dame',\n", " 'de',\n", " 'Bétharram',\n", " 'dénoncent',\n", " 'un',\n", " '«\\xa0régime',\n", " 'de',\n", " 'la',\n", " 'terreur\\xa0»'],\n", " 'ner': ['I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['D’anciens',\n", " 'élèves',\n", " 'de',\n", " 'l’établissement',\n", " 'catholique',\n", " 'privé',\n", " 'Notre-Dame',\n", " 'de',\n", " 'Bétharram',\n", " 'dénoncent',\n", " 'un',\n", " '«\\xa0régime',\n", " 'de',\n", " 'la',\n", " 'terreur\\xa0»'],\n", " 'sentence': ['D’anciens élèves de l’établissement catholique privé Notre-Dame de Bétharram dénoncent un «\\xa0régime de la terreur\\xa0»']},\n", " {'entities': ['l’Ukraine',\n", " 'Gabriel Attal',\n", " 'RN d’être «\\xa0pro-Poutine\\xa0»'],\n", " 'document': ['A l’Assemblée, l’accord avec l’Ukraine approuvé, Gabriel Attal accuse le RN d’être «\\xa0pro-Poutine\\xa0»'],\n", " 'token': ['A',\n", " 'l’Assemblée',\n", " ',',\n", " 'l’accord',\n", " 'avec',\n", " 'l’Ukraine',\n", " 'approuvé',\n", " ',',\n", " 'Gabriel',\n", " 'Attal',\n", " 'accuse',\n", " 'le',\n", " 'RN',\n", " 'd’être',\n", " '«\\xa0pro-Poutine\\xa0»'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'I-LOC',\n", " 'I-LOC'],\n", " 'embeddings': ['A',\n", " 'l’Assemblée',\n", " ',',\n", " 'l’accord',\n", " 'avec',\n", " 'l’Ukraine',\n", " 'approuvé',\n", " ',',\n", " 'Gabriel',\n", " 'Attal',\n", " 'accuse',\n", " 'le',\n", " 'RN',\n", " 'd’être',\n", " '«\\xa0pro-Poutine\\xa0»'],\n", " 'sentence': ['A l’Assemblée, l’accord avec l’Ukraine approuvé, Gabriel Attal accuse le RN d’être «\\xa0pro-Poutine\\xa0»']},\n", " {'entities': ['Anonymous Sudan'],\n", " 'document': [\"Cyberattaques 
contre l'État : qui est Anonymous Sudan qui revendique un piratage massif d'une «intensité inédite» ?\"],\n", " 'token': ['Cyberattaques',\n", " 'contre',\n", " \"l'État\",\n", " ':',\n", " 'qui',\n", " 'est',\n", " 'Anonymous',\n", " 'Sudan',\n", " 'qui',\n", " 'revendique',\n", " 'un',\n", " 'piratage',\n", " 'massif',\n", " \"d'une\",\n", " '«intensité',\n", " 'inédite»',\n", " '?'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Cyberattaques',\n", " 'contre',\n", " \"l'État\",\n", " ':',\n", " 'qui',\n", " 'est',\n", " 'Anonymous',\n", " 'Sudan',\n", " 'qui',\n", " 'revendique',\n", " 'un',\n", " 'piratage',\n", " 'massif',\n", " \"d'une\",\n", " '«intensité',\n", " 'inédite»',\n", " '?'],\n", " 'sentence': [\"Cyberattaques contre l'État : qui est Anonymous Sudan qui revendique un piratage massif d'une «intensité inédite» ?\"]},\n", " {'entities': ['Boeing 787', 'Latam Airlines', 'Nouvelle-Zélande\\xa0'],\n", " 'document': ['Un Boeing 787 de Latam Airlines rencontre un «\\xa0problème technique\\xa0» au-dessus de la Nouvelle-Zélande\\xa0; cinquante personnes blessées'],\n", " 'token': ['Un',\n", " 'Boeing',\n", " '787',\n", " 'de',\n", " 'Latam',\n", " 'Airlines',\n", " 'rencontre',\n", " 'un',\n", " '«\\xa0problème',\n", " 'technique\\xa0»',\n", " 'au-dessus',\n", " 'de',\n", " 'la',\n", " 'Nouvelle-Zélande\\xa0',\n", " ';',\n", " 'cinquante',\n", " 'personnes',\n", " 'blessées'],\n", " 'ner': ['O',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Un',\n", " 'Boeing',\n", " '787',\n", " 'de',\n", " 'Latam',\n", " 'Airlines',\n", " 'rencontre',\n", " 'un',\n", " '«\\xa0problème',\n", " 
'technique\\xa0»',\n", " 'au-dessus',\n", " 'de',\n", " 'la',\n", " 'Nouvelle-Zélande\\xa0',\n", " ';',\n", " 'cinquante',\n", " 'personnes',\n", " 'blessées'],\n", " 'sentence': ['Un Boeing 787 de Latam Airlines rencontre un «\\xa0problème technique\\xa0» au-dessus de la Nouvelle-Zélande\\xa0;',\n", " 'cinquante personnes blessées']},\n", " {'entities': ['Kate Middleton', '«La presse', 'Bertrand Deckers'],\n", " 'document': ['Polémique sur une photo trafiquée de Kate Middleton : «La presse est en train de se retourner contre elle (…), cela pourrait devenir dangereux», estime le spécialiste royauté Bertrand Deckers'],\n", " 'token': ['Polémique',\n", " 'sur',\n", " 'une',\n", " 'photo',\n", " 'trafiquée',\n", " 'de',\n", " 'Kate',\n", " 'Middleton',\n", " ':',\n", " '«La',\n", " 'presse',\n", " 'est',\n", " 'en',\n", " 'train',\n", " 'de',\n", " 'se',\n", " 'retourner',\n", " 'contre',\n", " 'elle',\n", " '(',\n", " '…',\n", " '),',\n", " 'cela',\n", " 'pourrait',\n", " 'devenir',\n", " 'dangereux»',\n", " ',',\n", " 'estime',\n", " 'le',\n", " 'spécialiste',\n", " 'royauté',\n", " 'Bertrand',\n", " 'Deckers'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['Polémique',\n", " 'sur',\n", " 'une',\n", " 'photo',\n", " 'trafiquée',\n", " 'de',\n", " 'Kate',\n", " 'Middleton',\n", " ':',\n", " '«La',\n", " 'presse',\n", " 'est',\n", " 'en',\n", " 'train',\n", " 'de',\n", " 'se',\n", " 'retourner',\n", " 'contre',\n", " 'elle',\n", " '(',\n", " '…',\n", " '),',\n", " 'cela',\n", " 'pourrait',\n", " 'devenir',\n", " 'dangereux»',\n", " ',',\n", " 'estime',\n", " 'le',\n", " 'spécialiste',\n", " 
'royauté',\n", " 'Bertrand',\n", " 'Deckers'],\n", " 'sentence': ['Polémique sur une photo trafiquée de Kate Middleton : «La presse est en train de se retourner contre elle (…), cela pourrait devenir dangereux», estime le spécialiste royauté Bertrand Deckers']},\n", " {'entities': ['Guerre en Ukraine\\xa0'],\n", " 'document': ['Guerre en Ukraine\\xa0: des milices russes ont attaqué leur propre pays'],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'des',\n", " 'milices',\n", " 'russes',\n", " 'ont',\n", " 'attaqué',\n", " 'leur',\n", " 'propre',\n", " 'pays'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'des',\n", " 'milices',\n", " 'russes',\n", " 'ont',\n", " 'attaqué',\n", " 'leur',\n", " 'propre',\n", " 'pays'],\n", " 'sentence': ['Guerre en Ukraine\\xa0: des milices russes ont attaqué leur propre pays']},\n", " {'entities': ['Marco Mouly'],\n", " 'document': ['Escroquerie à la taxe carbone\\xa0: \"Je me rends\", déclare Marco Mouly au tribunal en vue de son incarcération'],\n", " 'token': ['Escroquerie',\n", " 'à',\n", " 'la',\n", " 'taxe',\n", " 'carbone\\xa0',\n", " ':',\n", " '\"',\n", " 'Je',\n", " 'me',\n", " 'rends',\n", " '\",',\n", " 'déclare',\n", " 'Marco',\n", " 'Mouly',\n", " 'au',\n", " 'tribunal',\n", " 'en',\n", " 'vue',\n", " 'de',\n", " 'son',\n", " 'incarcération'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Escroquerie',\n", " 'à',\n", " 'la',\n", " 'taxe',\n", " 'carbone\\xa0',\n", " ':',\n", " '\"',\n", " 'Je',\n", " 'me',\n", " 'rends',\n", " '\",',\n", " 'déclare',\n", " 'Marco',\n", " 
'Mouly',\n", " 'au',\n", " 'tribunal',\n", " 'en',\n", " 'vue',\n", " 'de',\n", " 'son',\n", " 'incarcération'],\n", " 'sentence': ['Escroquerie à la taxe carbone\\xa0: \"Je me rends\", déclare Marco Mouly au tribunal en vue de son incarcération']},\n", " {'entities': ['Starship', 'Terre'],\n", " 'document': ['Espace : revivez le décollage réussi de Starship, avant la perte du vaisseau lors de son troisième vol test en redescendant vers la Terre'],\n", " 'token': ['Espace',\n", " ':',\n", " 'revivez',\n", " 'le',\n", " 'décollage',\n", " 'réussi',\n", " 'de',\n", " 'Starship',\n", " ',',\n", " 'avant',\n", " 'la',\n", " 'perte',\n", " 'du',\n", " 'vaisseau',\n", " 'lors',\n", " 'de',\n", " 'son',\n", " 'troisième',\n", " 'vol',\n", " 'test',\n", " 'en',\n", " 'redescendant',\n", " 'vers',\n", " 'la',\n", " 'Terre'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC'],\n", " 'embeddings': ['Espace',\n", " ':',\n", " 'revivez',\n", " 'le',\n", " 'décollage',\n", " 'réussi',\n", " 'de',\n", " 'Starship',\n", " ',',\n", " 'avant',\n", " 'la',\n", " 'perte',\n", " 'du',\n", " 'vaisseau',\n", " 'lors',\n", " 'de',\n", " 'son',\n", " 'troisième',\n", " 'vol',\n", " 'test',\n", " 'en',\n", " 'redescendant',\n", " 'vers',\n", " 'la',\n", " 'Terre'],\n", " 'sentence': ['Espace : revivez le décollage réussi de Starship, avant la perte du vaisseau lors de son troisième vol test en redescendant vers la Terre']},\n", " {'entities': ['Ukraine\\xa0', 'l’Ukraine'],\n", " 'document': ['En direct, guerre en Ukraine\\xa0: un soutien direct de l’OTAN à l’Ukraine ne serait pas contraire aux règles internationales, selon le président tchèque'],\n", " 'token': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " 
':',\n", " 'un',\n", " 'soutien',\n", " 'direct',\n", " 'de',\n", " 'l’OTAN',\n", " 'à',\n", " 'l’Ukraine',\n", " 'ne',\n", " 'serait',\n", " 'pas',\n", " 'contraire',\n", " 'aux',\n", " 'règles',\n", " 'internationales',\n", " ',',\n", " 'selon',\n", " 'le',\n", " 'président',\n", " 'tchèque'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['En',\n", " 'direct',\n", " ',',\n", " 'guerre',\n", " 'en',\n", " 'Ukraine\\xa0',\n", " ':',\n", " 'un',\n", " 'soutien',\n", " 'direct',\n", " 'de',\n", " 'l’OTAN',\n", " 'à',\n", " 'l’Ukraine',\n", " 'ne',\n", " 'serait',\n", " 'pas',\n", " 'contraire',\n", " 'aux',\n", " 'règles',\n", " 'internationales',\n", " ',',\n", " 'selon',\n", " 'le',\n", " 'président',\n", " 'tchèque'],\n", " 'sentence': ['En direct, guerre en Ukraine\\xa0: un soutien direct de l’OTAN à l’Ukraine ne serait pas contraire aux règles internationales, selon le président tchèque']},\n", " {'entities': ['Lisa', 'Louka', 'Mathéïs'],\n", " 'document': ['Lisa, Louka, Mathéïs, la triste chronique de trois infanticides\\xa0annoncés'],\n", " 'token': ['Lisa',\n", " ',',\n", " 'Louka',\n", " ',',\n", " 'Mathéïs',\n", " ',',\n", " 'la',\n", " 'triste',\n", " 'chronique',\n", " 'de',\n", " 'trois',\n", " 'infanticides\\xa0annoncés'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Lisa',\n", " ',',\n", " 'Louka',\n", " ',',\n", " 'Mathéïs',\n", " ',',\n", " 'la',\n", " 'triste',\n", " 'chronique',\n", " 'de',\n", " 'trois',\n", " 'infanticides\\xa0annoncés'],\n", " 'sentence': ['Lisa, Louka, Mathéïs, la triste chronique de trois infanticides\\xa0annoncés']},\n", " 
{'entities': ['Laurent Nuñez'],\n", " 'document': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " 'token': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'sentence': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars']},\n", " {'entities': ['Marine Le Pen', 'Macron'],\n", " 'document': ['Débat sur le soutien à l\\'Ukraine : Marine Le Pen dénonce \"les annonces guerrières d\\'Emmanuel Macron\" sur un possible envoi de troupes au sol'],\n", " 'token': ['Débat',\n", " 'sur',\n", " 'le',\n", " 'soutien',\n", " 'à',\n", " \"l'Ukraine\",\n", " ':',\n", " 'Marine',\n", " 'Le',\n", " 'Pen',\n", " 'dénonce',\n", " '\"',\n", " 'les',\n", " 'annonces',\n", " 'guerrières',\n", " \"d'Emmanuel\",\n", " 'Macron',\n", " '\"',\n", " 'sur',\n", " 'un',\n", " 'possible',\n", " 'envoi',\n", " 'de',\n", " 'troupes',\n", " 'au',\n", " 'sol'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'I-PER',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Débat',\n", " 'sur',\n", " 'le',\n", " 'soutien',\n", " 'à',\n", " \"l'Ukraine\",\n", " ':',\n", " 'Marine',\n", " 'Le',\n", " 'Pen',\n", " 'dénonce',\n", " '\"',\n", " 'les',\n", " 'annonces',\n", " 'guerrières',\n", " \"d'Emmanuel\",\n", " 'Macron',\n", " '\"',\n", " 'sur',\n", " 'un',\n", " 'possible',\n", " 'envoi',\n", " 'de',\n", " 'troupes',\n", " 'au',\n", " 'sol'],\n", " 'sentence': ['Débat sur le soutien à l\\'Ukraine : Marine Le Pen dénonce \"les annonces guerrières d\\'Emmanuel Macron\" sur un possible envoi de troupes au sol']},\n", " {'entities': ['L’amiral Philippe de Gaulle', 'Général'],\n", " 'document': ['L’amiral Philippe de Gaulle, fils du Général, est mort'],\n", " 'token': ['L’amiral',\n", " 'Philippe',\n", " 'de',\n", " 'Gaulle',\n", " ',',\n", " 'fils',\n", " 'du',\n", " 'Général',\n", " ',',\n", " 'est',\n", " 'mort'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['L’amiral',\n", " 'Philippe',\n", " 'de',\n", " 'Gaulle',\n", " ',',\n", " 'fils',\n", " 'du',\n", " 'Général',\n", " ',',\n", " 'est',\n", " 'mort'],\n", " 'sentence': ['L’amiral Philippe de Gaulle, fils du Général, est mort']},\n", " {'entities': [],\n", " 'document': ['Les salariés en arrêt-maladie ont désormais droit à quatre semaines de congés payés'],\n", " 'token': ['Les',\n", " 'salariés',\n", " 'en',\n", " 'arrêt-maladie',\n", " 'ont',\n", " 'désormais',\n", " 'droit',\n", " 'à',\n", " 'quatre',\n", " 'semaines',\n", " 'de',\n", " 'congés',\n", " 'payés'],\n", " 'ner': ['O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O'],\n", " 'embeddings': ['Les',\n", " 'salariés',\n", " 'en',\n", " 
'arrêt-maladie',\n", " 'ont',\n", " 'désormais',\n", " 'droit',\n", " 'à',\n", " 'quatre',\n", " 'semaines',\n", " 'de',\n", " 'congés',\n", " 'payés'],\n", " 'sentence': ['Les salariés en arrêt-maladie ont désormais droit à quatre semaines de congés payés']},\n", " {'entities': ['Rennes'],\n", " 'document': ['Criminalité : un quartier de Rennes traumatisé après une nuit de fusillade'],\n", " 'token': ['Criminalité',\n", " ':',\n", " 'un',\n", " 'quartier',\n", " 'de',\n", " 'Rennes',\n", " 'traumatisé',\n", " 'après',\n", " 'une',\n", " 'nuit',\n", " 'de',\n", " 'fusillade'],\n", " 'ner': ['O', 'O', 'O', 'O', 'O', 'I-LOC', 'O', 'O', 'O', 'O', 'O', 'O'],\n", " 'embeddings': ['Criminalité',\n", " ':',\n", " 'un',\n", " 'quartier',\n", " 'de',\n", " 'Rennes',\n", " 'traumatisé',\n", " 'après',\n", " 'une',\n", " 'nuit',\n", " 'de',\n", " 'fusillade'],\n", " 'sentence': ['Criminalité : un quartier de Rennes traumatisé après une nuit de fusillade']},\n", " {'entities': ['Nom identique'],\n", " 'document': [\"Nom identique, amis en commun... 
assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " 'token': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'sentence': ['Nom identique, amis en commun.',\n", " '.',\n", " '.',\n", " \"assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"]},\n", " {'entities': ['Hamas', 'Israël', 'Judith Butler'],\n", " 'document': ['Les attaques du Hamas en Israël, un «\\xa0acte de résistance\\xa0»\\xa0? 
La philosophe Judith Butler ravive la polémique à gauche'],\n", " 'token': ['Les',\n", " 'attaques',\n", " 'du',\n", " 'Hamas',\n", " 'en',\n", " 'Israël',\n", " ',',\n", " 'un',\n", " '«\\xa0acte',\n", " 'de',\n", " 'résistance\\xa0»\\xa0',\n", " '?',\n", " 'La',\n", " 'philosophe',\n", " 'Judith',\n", " 'Butler',\n", " 'ravive',\n", " 'la',\n", " 'polémique',\n", " 'à',\n", " 'gauche'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Les',\n", " 'attaques',\n", " 'du',\n", " 'Hamas',\n", " 'en',\n", " 'Israël',\n", " ',',\n", " 'un',\n", " '«\\xa0acte',\n", " 'de',\n", " 'résistance\\xa0»\\xa0',\n", " '?',\n", " 'La',\n", " 'philosophe',\n", " 'Judith',\n", " 'Butler',\n", " 'ravive',\n", " 'la',\n", " 'polémique',\n", " 'à',\n", " 'gauche'],\n", " 'sentence': ['Les attaques du Hamas en Israël, un «\\xa0acte de résistance\\xa0»\\xa0?',\n", " 'La philosophe Judith Butler ravive la polémique à gauche']},\n", " {'entities': ['Cour des comptes'],\n", " 'document': ['Adaptation au réchauffement climatique : six choses à retenir du rapport de la Cour des comptes'],\n", " 'token': ['Adaptation',\n", " 'au',\n", " 'réchauffement',\n", " 'climatique',\n", " ':',\n", " 'six',\n", " 'choses',\n", " 'à',\n", " 'retenir',\n", " 'du',\n", " 'rapport',\n", " 'de',\n", " 'la',\n", " 'Cour',\n", " 'des',\n", " 'comptes'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'I-ORG'],\n", " 'embeddings': ['Adaptation',\n", " 'au',\n", " 'réchauffement',\n", " 'climatique',\n", " ':',\n", " 'six',\n", " 'choses',\n", " 'à',\n", " 'retenir',\n", " 'du',\n", " 'rapport',\n", " 'de',\n", " 'la',\n", " 'Cour',\n", " 'des',\n", " 
'comptes'],\n", " 'sentence': ['Adaptation au réchauffement climatique : six choses à retenir du rapport de la Cour des comptes']},\n", " {'entities': ['D’anciens', 'Notre-Dame de Bétharram'],\n", " 'document': ['D’anciens élèves de l’établissement catholique privé Notre-Dame de Bétharram dénoncent un «\\xa0régime de la terreur\\xa0»'],\n", " 'token': ['D’anciens',\n", " 'élèves',\n", " 'de',\n", " 'l’établissement',\n", " 'catholique',\n", " 'privé',\n", " 'Notre-Dame',\n", " 'de',\n", " 'Bétharram',\n", " 'dénoncent',\n", " 'un',\n", " '«\\xa0régime',\n", " 'de',\n", " 'la',\n", " 'terreur\\xa0»'],\n", " 'ner': ['I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['D’anciens',\n", " 'élèves',\n", " 'de',\n", " 'l’établissement',\n", " 'catholique',\n", " 'privé',\n", " 'Notre-Dame',\n", " 'de',\n", " 'Bétharram',\n", " 'dénoncent',\n", " 'un',\n", " '«\\xa0régime',\n", " 'de',\n", " 'la',\n", " 'terreur\\xa0»'],\n", " 'sentence': ['D’anciens élèves de l’établissement catholique privé Notre-Dame de Bétharram dénoncent un «\\xa0régime de la terreur\\xa0»']},\n", " {'entities': ['Laurent Nuñez'],\n", " 'document': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars'],\n", " 'token': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 
'O'],\n", " 'embeddings': ['«Nous',\n", " 'vivrons»',\n", " ':',\n", " 'Laurent',\n", " 'Nuñez',\n", " 'annonce',\n", " 'avoir',\n", " '«identifié',\n", " 'les',\n", " 'auteurs»',\n", " 'des',\n", " 'agressions',\n", " 'contre',\n", " 'le',\n", " 'collectif',\n", " 'pendant',\n", " 'la',\n", " 'manifestation',\n", " 'du',\n", " '8',\n", " 'mars'],\n", " 'sentence': ['«Nous vivrons» : Laurent Nuñez annonce avoir «identifié les auteurs» des agressions contre le collectif pendant la manifestation du 8 mars']},\n", " {'entities': [],\n", " 'document': ['\"C\\'est trop tard pour faire autrement\"\\xa0: la majorité divisée sur la stratégie anti-RN pour les élections européennes'],\n", " 'token': ['\"',\n", " \"C'est\",\n", " 'trop',\n", " 'tard',\n", " 'pour',\n", " 'faire',\n", " 'autrement\"\\xa0',\n", " ':',\n", " 'la',\n", " 'majorité',\n", " 'divisée',\n", " 'sur',\n", " 'la',\n", " 'stratégie',\n", " 'anti-RN',\n", " 'pour',\n", " 'les',\n", " 'élections',\n", " 'européennes'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['\"',\n", " \"C'est\",\n", " 'trop',\n", " 'tard',\n", " 'pour',\n", " 'faire',\n", " 'autrement\"\\xa0',\n", " ':',\n", " 'la',\n", " 'majorité',\n", " 'divisée',\n", " 'sur',\n", " 'la',\n", " 'stratégie',\n", " 'anti-RN',\n", " 'pour',\n", " 'les',\n", " 'élections',\n", " 'européennes'],\n", " 'sentence': ['\"C\\'est trop tard pour faire autrement\"\\xa0: la majorité divisée sur la stratégie anti-RN pour les élections européennes']},\n", " {'entities': ['Tesla'],\n", " 'document': ['\"On continue d\\'aller dans le mur, même si on y va en Tesla\" : dans l\\'Allier, un projet de mine de lithium révèle le fossé entre deux visions de l\\'écologie'],\n", " 'token': ['\"',\n", " 'On',\n", " 'continue',\n", " \"d'aller\",\n", " 'dans',\n", " 'le',\n", " 
'mur',\n", " ',',\n", " 'même',\n", " 'si',\n", " 'on',\n", " 'y',\n", " 'va',\n", " 'en',\n", " 'Tesla',\n", " '\"',\n", " ':',\n", " 'dans',\n", " \"l'Allier\",\n", " ',',\n", " 'un',\n", " 'projet',\n", " 'de',\n", " 'mine',\n", " 'de',\n", " 'lithium',\n", " 'révèle',\n", " 'le',\n", " 'fossé',\n", " 'entre',\n", " 'deux',\n", " 'visions',\n", " 'de',\n", " \"l'écologie\"],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['\"',\n", " 'On',\n", " 'continue',\n", " \"d'aller\",\n", " 'dans',\n", " 'le',\n", " 'mur',\n", " ',',\n", " 'même',\n", " 'si',\n", " 'on',\n", " 'y',\n", " 'va',\n", " 'en',\n", " 'Tesla',\n", " '\"',\n", " ':',\n", " 'dans',\n", " \"l'Allier\",\n", " ',',\n", " 'un',\n", " 'projet',\n", " 'de',\n", " 'mine',\n", " 'de',\n", " 'lithium',\n", " 'révèle',\n", " 'le',\n", " 'fossé',\n", " 'entre',\n", " 'deux',\n", " 'visions',\n", " 'de',\n", " \"l'écologie\"],\n", " 'sentence': ['\"On continue d\\'aller dans le mur, même si on y va en Tesla\" : dans l\\'Allier, un projet de mine de lithium révèle le fossé entre deux visions de l\\'écologie']},\n", " {'entities': ['Oscars\\xa02024\\xa0'],\n", " 'document': ['Oscars\\xa02024\\xa0: revivez la cérémonie avec le triomphe d’«\\xa0Oppenheimer\\xa0», et retrouvez le décryptage de notre journaliste'],\n", " 'token': ['Oscars\\xa02024\\xa0',\n", " ':',\n", " 'revivez',\n", " 'la',\n", " 'cérémonie',\n", " 'avec',\n", " 'le',\n", " 'triomphe',\n", " 'd’«\\xa0Oppenheimer\\xa0»',\n", " ',',\n", " 'et',\n", " 'retrouvez',\n", " 'le',\n", " 'décryptage',\n", " 'de',\n", " 'notre',\n", " 'journaliste'],\n", " 'ner': ['I-PER',\n", " 'O',\n", " 'O',\n", " 
'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Oscars\\xa02024\\xa0',\n", " ':',\n", " 'revivez',\n", " 'la',\n", " 'cérémonie',\n", " 'avec',\n", " 'le',\n", " 'triomphe',\n", " 'd’«\\xa0Oppenheimer\\xa0»',\n", " ',',\n", " 'et',\n", " 'retrouvez',\n", " 'le',\n", " 'décryptage',\n", " 'de',\n", " 'notre',\n", " 'journaliste'],\n", " 'sentence': ['Oscars\\xa02024\\xa0: revivez la cérémonie avec le triomphe d’«\\xa0Oppenheimer\\xa0», et retrouvez le décryptage de notre journaliste']},\n", " {'entities': ['l’Ukraine', 'Parlement\\xa0', 'LFI', 'Kiev', 'RN s’abstiendra'],\n", " 'document': ['Débat sur l’Ukraine au Parlement\\xa0: LFI annonce voter contre la stratégie française d’aide à Kiev, le RN s’abstiendra'],\n", " 'token': ['Débat',\n", " 'sur',\n", " 'l’Ukraine',\n", " 'au',\n", " 'Parlement\\xa0',\n", " ':',\n", " 'LFI',\n", " 'annonce',\n", " 'voter',\n", " 'contre',\n", " 'la',\n", " 'stratégie',\n", " 'française',\n", " 'd’aide',\n", " 'à',\n", " 'Kiev',\n", " ',',\n", " 'le',\n", " 'RN',\n", " 's’abstiendra'],\n", " 'ner': ['O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'I-LOC'],\n", " 'embeddings': ['Débat',\n", " 'sur',\n", " 'l’Ukraine',\n", " 'au',\n", " 'Parlement\\xa0',\n", " ':',\n", " 'LFI',\n", " 'annonce',\n", " 'voter',\n", " 'contre',\n", " 'la',\n", " 'stratégie',\n", " 'française',\n", " 'd’aide',\n", " 'à',\n", " 'Kiev',\n", " ',',\n", " 'le',\n", " 'RN',\n", " 's’abstiendra'],\n", " 'sentence': ['Débat sur l’Ukraine au Parlement\\xa0: LFI annonce voter contre la stratégie française d’aide à Kiev, le RN s’abstiendra']},\n", " {'entities': ['Kate Middleton', '«La presse', 'Bertrand Deckers'],\n", " 'document': ['Polémique sur une photo 
trafiquée de Kate Middleton : «La presse est en train de se retourner contre elle (…), cela pourrait devenir dangereux», estime le spécialiste royauté Bertrand Deckers'],\n", " 'token': ['Polémique',\n", " 'sur',\n", " 'une',\n", " 'photo',\n", " 'trafiquée',\n", " 'de',\n", " 'Kate',\n", " 'Middleton',\n", " ':',\n", " '«La',\n", " 'presse',\n", " 'est',\n", " 'en',\n", " 'train',\n", " 'de',\n", " 'se',\n", " 'retourner',\n", " 'contre',\n", " 'elle',\n", " '(',\n", " '…',\n", " '),',\n", " 'cela',\n", " 'pourrait',\n", " 'devenir',\n", " 'dangereux»',\n", " ',',\n", " 'estime',\n", " 'le',\n", " 'spécialiste',\n", " 'royauté',\n", " 'Bertrand',\n", " 'Deckers'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER'],\n", " 'embeddings': ['Polémique',\n", " 'sur',\n", " 'une',\n", " 'photo',\n", " 'trafiquée',\n", " 'de',\n", " 'Kate',\n", " 'Middleton',\n", " ':',\n", " '«La',\n", " 'presse',\n", " 'est',\n", " 'en',\n", " 'train',\n", " 'de',\n", " 'se',\n", " 'retourner',\n", " 'contre',\n", " 'elle',\n", " '(',\n", " '…',\n", " '),',\n", " 'cela',\n", " 'pourrait',\n", " 'devenir',\n", " 'dangereux»',\n", " ',',\n", " 'estime',\n", " 'le',\n", " 'spécialiste',\n", " 'royauté',\n", " 'Bertrand',\n", " 'Deckers'],\n", " 'sentence': ['Polémique sur une photo trafiquée de Kate Middleton : «La presse est en train de se retourner contre elle (…), cela pourrait devenir dangereux», estime le spécialiste royauté Bertrand Deckers']},\n", " {'entities': ['Grêlé'],\n", " 'document': ['Le tueur en série \"le Grêlé\" avait participé à l\\'émission \"Tout le monde veut prendre sa place\" en 2019'],\n", " 'token': ['Le',\n", " 'tueur',\n", 
" 'en',\n", " 'série',\n", " '\"',\n", " 'le',\n", " 'Grêlé',\n", " '\"',\n", " 'avait',\n", " 'participé',\n", " 'à',\n", " \"l'émission\",\n", " '\"',\n", " 'Tout',\n", " 'le',\n", " 'monde',\n", " 'veut',\n", " 'prendre',\n", " 'sa',\n", " 'place',\n", " '\"',\n", " 'en',\n", " '2019'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Le',\n", " 'tueur',\n", " 'en',\n", " 'série',\n", " '\"',\n", " 'le',\n", " 'Grêlé',\n", " '\"',\n", " 'avait',\n", " 'participé',\n", " 'à',\n", " \"l'émission\",\n", " '\"',\n", " 'Tout',\n", " 'le',\n", " 'monde',\n", " 'veut',\n", " 'prendre',\n", " 'sa',\n", " 'place',\n", " '\"',\n", " 'en',\n", " '2019'],\n", " 'sentence': ['Le tueur en série \"le Grêlé\" avait participé à l\\'émission \"Tout le monde veut prendre sa place\" en 2019']},\n", " {'entities': ['Nom identique'],\n", " 'document': [\"Nom identique, amis en commun... 
assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"],\n", " 'token': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Nom',\n", " 'identique',\n", " ',',\n", " 'amis',\n", " 'en',\n", " 'commun',\n", " '.',\n", " '.',\n", " '.',\n", " 'assis',\n", " 'côte',\n", " 'à',\n", " 'côte',\n", " ',',\n", " 'ils',\n", " 'découvrent',\n", " \"qu'ils\",\n", " 'sont',\n", " 'sosies',\n", " 'durant',\n", " 'un',\n", " 'vol'],\n", " 'sentence': ['Nom identique, amis en commun.',\n", " '.',\n", " '.',\n", " \"assis côte à côte, ils découvrent qu'ils sont sosies durant un vol\"]},\n", " {'entities': ['Anonymous Sudan'],\n", " 'document': [\"Cyberattaques contre l'État : qui est Anonymous Sudan qui revendique un piratage massif d'une «intensité inédite» ?\"],\n", " 'token': ['Cyberattaques',\n", " 'contre',\n", " \"l'État\",\n", " ':',\n", " 'qui',\n", " 'est',\n", " 'Anonymous',\n", " 'Sudan',\n", " 'qui',\n", " 'revendique',\n", " 'un',\n", " 'piratage',\n", " 'massif',\n", " \"d'une\",\n", " '«intensité',\n", " 'inédite»',\n", " '?'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Cyberattaques',\n", " 'contre',\n", " \"l'État\",\n", " ':',\n", " 'qui',\n", " 'est',\n", " 'Anonymous',\n", " 'Sudan',\n", " 'qui',\n", " 'revendique',\n", " 'un',\n", " 
'piratage',\n", " 'massif',\n", " \"d'une\",\n", " '«intensité',\n", " 'inédite»',\n", " '?'],\n", " 'sentence': [\"Cyberattaques contre l'État : qui est Anonymous Sudan qui revendique un piratage massif d'une «intensité inédite» ?\"]},\n", " {'entities': ['Kate Middleton'],\n", " 'document': ['Kate Middleton présente ses excuses après la publication d’une photo retouchée de la princesse et de ses enfants'],\n", " 'token': ['Kate',\n", " 'Middleton',\n", " 'présente',\n", " 'ses',\n", " 'excuses',\n", " 'après',\n", " 'la',\n", " 'publication',\n", " 'd’une',\n", " 'photo',\n", " 'retouchée',\n", " 'de',\n", " 'la',\n", " 'princesse',\n", " 'et',\n", " 'de',\n", " 'ses',\n", " 'enfants'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Kate',\n", " 'Middleton',\n", " 'présente',\n", " 'ses',\n", " 'excuses',\n", " 'après',\n", " 'la',\n", " 'publication',\n", " 'd’une',\n", " 'photo',\n", " 'retouchée',\n", " 'de',\n", " 'la',\n", " 'princesse',\n", " 'et',\n", " 'de',\n", " 'ses',\n", " 'enfants'],\n", " 'sentence': ['Kate Middleton présente ses excuses après la publication d’une photo retouchée de la princesse et de ses enfants']},\n", " {'entities': ['Saint-Malo'],\n", " 'document': ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)'],\n", " 'token': ['«Marée',\n", " 'du',\n", " 'siècle»',\n", " 'à',\n", " 'Saint-Malo',\n", " ':',\n", " 'les',\n", " 'images',\n", " 'impressionnantes',\n", " 'de',\n", " 'la',\n", " 'montée',\n", " 'des',\n", " 'eaux',\n", " '(',\n", " 'vidéo',\n", " ')'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['«Marée',\n", " 'du',\n", " 
'siècle»',\n", " 'à',\n", " 'Saint-Malo',\n", " ':',\n", " 'les',\n", " 'images',\n", " 'impressionnantes',\n", " 'de',\n", " 'la',\n", " 'montée',\n", " 'des',\n", " 'eaux',\n", " '(',\n", " 'vidéo',\n", " ')'],\n", " 'sentence': ['«Marée du siècle» à Saint-Malo : les images impressionnantes de la montée des eaux (vidéo)']},\n", " {'entities': ['Vincent Bolloré', 'Canal+'],\n", " 'document': ['Devant les députés, Vincent Bolloré, président du groupe Canal+, assume sa foi et récuse tout \"projet idéologique\"'],\n", " 'token': ['Devant',\n", " 'les',\n", " 'députés',\n", " ',',\n", " 'Vincent',\n", " 'Bolloré',\n", " ',',\n", " 'président',\n", " 'du',\n", " 'groupe',\n", " 'Canal+',\n", " ',',\n", " 'assume',\n", " 'sa',\n", " 'foi',\n", " 'et',\n", " 'récuse',\n", " 'tout',\n", " '\"',\n", " 'projet',\n", " 'idéologique',\n", " '\"'],\n", " 'ner': ['O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Devant',\n", " 'les',\n", " 'députés',\n", " ',',\n", " 'Vincent',\n", " 'Bolloré',\n", " ',',\n", " 'président',\n", " 'du',\n", " 'groupe',\n", " 'Canal+',\n", " ',',\n", " 'assume',\n", " 'sa',\n", " 'foi',\n", " 'et',\n", " 'récuse',\n", " 'tout',\n", " '\"',\n", " 'projet',\n", " 'idéologique',\n", " '\"'],\n", " 'sentence': ['Devant les députés, Vincent Bolloré, président du groupe Canal+, assume sa foi et récuse tout \"projet idéologique\"']},\n", " {'entities': ['Marco Mouly'],\n", " 'document': ['Marco Mouly, «\\xa0roi de l’arnaque\\xa0» à la «\\xa0taxe carbone\\xa0», se rend au tribunal en vue de son incarcération'],\n", " 'token': ['Marco',\n", " 'Mouly',\n", " ',',\n", " '«\\xa0roi',\n", " 'de',\n", " 'l’arnaque\\xa0»',\n", " 'à',\n", " 'la',\n", " '«\\xa0taxe',\n", " 'carbone\\xa0»',\n", " ',',\n", " 'se',\n", " 'rend',\n", " 'au',\n", " 
'tribunal',\n", " 'en',\n", " 'vue',\n", " 'de',\n", " 'son',\n", " 'incarcération'],\n", " 'ner': ['I-PER',\n", " 'I-PER',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O'],\n", " 'embeddings': ['Marco',\n", " 'Mouly',\n", " ',',\n", " '«\\xa0roi',\n", " 'de',\n", " 'l’arnaque\\xa0»',\n", " 'à',\n", " 'la',\n", " '«\\xa0taxe',\n", " 'carbone\\xa0»',\n", " ',',\n", " 'se',\n", " 'rend',\n", " 'au',\n", " 'tribunal',\n", " 'en',\n", " 'vue',\n", " 'de',\n", " 'son',\n", " 'incarcération'],\n", " 'sentence': ['Marco Mouly, «\\xa0roi de l’arnaque\\xa0» à la «\\xa0taxe carbone\\xa0», se rend au tribunal en vue de son incarcération']}]" ] }, "execution_count": 24, "metadata": {}, "output_type": "execute_result" } ], "source": [ "ner_list" ] }, { "cell_type": "code", "execution_count": 28, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "{'entities': ['Guerre en Ukraine', 'Macron', 'France 2', 'TF1'],\n", " 'document': [\"Guerre en Ukraine : ce qu'il faut retenir de l'interview d'Emmanuel Macron sur France 2 et TF1\"],\n", " 'token': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'ce',\n", " \"qu'il\",\n", " 'faut',\n", " 'retenir',\n", " 'de',\n", " \"l'interview\",\n", " \"d'Emmanuel\",\n", " 'Macron',\n", " 'sur',\n", " 'France',\n", " '2',\n", " 'et',\n", " 'TF1'],\n", " 'ner': ['I-MISC',\n", " 'I-MISC',\n", " 'I-LOC',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'O',\n", " 'I-ORG',\n", " 'O',\n", " 'I-ORG',\n", " 'I-ORG',\n", " 'O',\n", " 'I-ORG'],\n", " 'embeddings': ['Guerre',\n", " 'en',\n", " 'Ukraine',\n", " ':',\n", " 'ce',\n", " \"qu'il\",\n", " 'faut',\n", " 'retenir',\n", " 'de',\n", " \"l'interview\",\n", " \"d'Emmanuel\",\n", " 'Macron',\n", " 'sur',\n", " 'France',\n", " '2',\n", " 'et',\n", " 'TF1'],\n", " 'sentence': [\"Guerre en Ukraine : ce qu'il 
faut retenir de l'interview d'Emmanuel Macron sur France 2 et TF1\"]}" ] }, "execution_count": 28, "metadata": {}, "output_type": "execute_result" }, { "name": "stderr", "output_type": "stream", "text": [ "24/03/29 09:42:52 WARN HeartbeatReceiver: Removing executor driver with no recent heartbeats: 37382339 ms exceeds timeout 120000 ms\n", "24/03/29 09:42:52 WARN SparkContext: Killing executors is not supported by current scheduler.\n", "24/03/29 09:42:57 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", 
"\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:42:57 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:43:07 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:43:07 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:43:17 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:43:17 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:43:27 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:43:27 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:43:37 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:43:37 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:43:47 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:43:47 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:43:57 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:43:57 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:44:07 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:44:07 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:44:17 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", "\t... 
17 more\n", "24/03/29 09:44:17 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", "\t... 
17 more\n", "24/03/29 09:44:27 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:44:27 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:44:37 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", "\t... 
17 more\n", "24/03/29 09:44:37 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", "\t... 
17 more\n", "24/03/29 09:44:47 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:44:47 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:44:57 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:44:57 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:45:07 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:45:07 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:45:17 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:45:17 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:45:27 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:45:27 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:45:37 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:45:37 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:45:47 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", "\t... 
17 more\n", "24/03/29 09:45:47 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", "\t... 
17 more\n", "24/03/29 09:45:57 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", "\t... 
17 more\n", "24/03/29 09:45:57 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", "\t... 
17 more\n", "24/03/29 09:46:07 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:46:07 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:46:17 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:46:17 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:46:27 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:46:27 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:46:37 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:46:37 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:46:47 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:46:47 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:46:57 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:46:57 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:47:07 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:47:07 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:47:17 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:47:17 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:47:27 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:47:27 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:47:37 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:47:37 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:47:47 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:47:47 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:47:57 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:47:57 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:48:07 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:48:07 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:48:17 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:48:17 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:48:27 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:48:27 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:48:37 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:48:37 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:48:47 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:48:47 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:48:57 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:48:57 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:49:07 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:49:07 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:49:17 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:49:17 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:49:27 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:49:27 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:49:37 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:49:37 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:49:47 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:49:47 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:49:57 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:49:57 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:50:07 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:50:07 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:50:17 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", "\t... 
17 more\n", "24/03/29 09:50:17 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.dispatchOrAddCallback(Promise.scala:316)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.onComplete(Promise.scala:307)\n", "\tat scala.concurrent.impl.Promise.transformWith(Promise.scala:40)\n", "\tat scala.concurrent.impl.Promise.transformWith$(Promise.scala:38)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.transformWith(Promise.scala:187)\n", "\tat scala.concurrent.Future.flatMap(Future.scala:306)\n", "\tat scala.concurrent.Future.flatMap$(Future.scala:306)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.flatMap(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:150)\n", "\t... 
17 more\n", "24/03/29 09:50:27 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:50:27 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:50:37 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:50:37 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:50:47 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:50:47 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:50:57 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:50:57 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:51:07 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:51:07 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:51:17 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:51:17 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:51:27 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:51:27 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:51:37 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:51:37 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:51:47 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:51:47 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:51:57 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:51:57 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:52:07 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:52:07 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:52:17 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:52:17 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:52:27 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:52:27 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:52:37 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:52:37 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:52:47 WARN Executor: Issue communicating with driver in heartbeater\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)\n", "\tat org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)\n", "\tat org.apache.spark.storage.BlockManagerMaster.registerBlockManager(BlockManagerMaster.scala:80)\n", "\tat org.apache.spark.storage.BlockManager.reregister(BlockManager.scala:642)\n", "\tat org.apache.spark.executor.Executor.reportHeartBeat(Executor.scala:1223)\n", "\tat org.apache.spark.executor.Executor.$anonfun$heartbeater$1(Executor.scala:295)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1928)\n", "\tat org.apache.spark.Heartbeater$$anon$1.run(Heartbeater.scala:46)\n", "\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n", "\tat java.base/java.util.concurrent.FutureTask.runAndReset(FutureTask.java:305)\n", "\tat java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:305)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat 
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\t... 
3 more\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat 
org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:52:47 ERROR Inbox: Ignoring error\n", "org.apache.spark.SparkException: Exception thrown in awaitResult: \n", "\tat org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:56)\n", "\tat org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:310)\n", "\tat org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:102)\n", "\tat org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:110)\n", "\tat org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.driverEndpoint$lzycompute(BlockManagerMasterEndpoint.scala:124)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$driverEndpoint(BlockManagerMasterEndpoint.scala:123)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$lzycompute$1(BlockManagerMasterEndpoint.scala:688)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.isExecutorAlive$1(BlockManagerMasterEndpoint.scala:687)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint.org$apache$spark$storage$BlockManagerMasterEndpoint$$register(BlockManagerMasterEndpoint.scala:725)\n", "\tat org.apache.spark.storage.BlockManagerMasterEndpoint$$anonfun$receiveAndReply$1.applyOrElse(BlockManagerMasterEndpoint.scala:133)\n", "\tat org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:103)\n", "\tat org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:213)\n", "\tat org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:100)\n", "\tat org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:75)\n", "\tat org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:41)\n", "\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n", "\tat 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n", "\tat java.base/java.lang.Thread.run(Thread.java:829)\n", "Caused by: org.apache.spark.rpc.RpcEndpointNotFoundException: Cannot find endpoint: spark://CoarseGrainedScheduler@192.168.1.91:35719\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1(NettyRpcEnv.scala:148)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$asyncSetupEndpointRefByURI$1$adapted(NettyRpcEnv.scala:144)\n", "\tat scala.concurrent.Future.$anonfun$flatMap$1(Future.scala:307)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transformWith$1(Promise.scala:41)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat 
scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat scala.concurrent.BatchingExecutor$Batch.processBatch$1(BatchingExecutor.scala:67)\n", "\tat scala.concurrent.BatchingExecutor$Batch.$anonfun$run$1(BatchingExecutor.scala:82)\n", "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", "\tat scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:85)\n", "\tat scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:59)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:875)\n", "\tat scala.concurrent.BatchingExecutor.execute(BatchingExecutor.scala:110)\n", "\tat scala.concurrent.BatchingExecutor.execute$(BatchingExecutor.scala:107)\n", "\tat scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:873)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.trySuccess(Promise.scala:94)\n", "\tat scala.concurrent.Promise.trySuccess$(Promise.scala:94)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.trySuccess(Promise.scala:187)\n", 
"\tat org.apache.spark.rpc.netty.NettyRpcEnv.onSuccess$1(NettyRpcEnv.scala:225)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5(NettyRpcEnv.scala:239)\n", "\tat org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$5$adapted(NettyRpcEnv.scala:238)\n", "\tat scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)\n", "\tat org.apache.spark.util.ThreadUtils$$anon$1.execute(ThreadUtils.scala:99)\n", "\tat scala.concurrent.impl.ExecutionContextImpl$$anon$4.execute(ExecutionContextImpl.scala:138)\n", "\tat scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:72)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.$anonfun$tryComplete$1$adapted(Promise.scala:288)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:288)\n", "\tat scala.concurrent.Promise.complete(Promise.scala:53)\n", "\tat scala.concurrent.Promise.complete$(Promise.scala:52)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:187)\n", "\tat scala.concurrent.Promise.success(Promise.scala:86)\n", "\tat scala.concurrent.Promise.success$(Promise.scala:86)\n", "\tat scala.concurrent.impl.Promise$DefaultPromise.success(Promise.scala:187)\n", "\tat org.apache.spark.rpc.netty.LocalNettyRpcCallContext.send(NettyRpcCallContext.scala:50)\n", "\tat org.apache.spark.rpc.netty.NettyRpcCallContext.reply(NettyRpcCallContext.scala:32)\n", "\tat org.apache.spark.rpc.netty.RpcEndpointVerifier$$anonfun$receiveAndReply$1.applyOrElse(RpcEndpointVerifier.scala:31)\n", "\t... 
8 more\n", "24/03/29 09:52:47 ERROR Executor: Exit as unable to send heartbeats to driver more than 60 times\n" ] } ], "source": [ "# Inspect the first NER result; `ner_list` is built in an earlier cell (NOTE: the\n", "# Spark executor died above — presumably this cell must be re-run after restarting\n", "# the Spark session; confirm `ner_list` is still populated)\n", "ner_list[0]" ] } ], "metadata": { "kernelspec": { "display_name": ".venv", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.7" }, "orig_nbformat": 4 }, "nbformat": 4, "nbformat_minor": 2 }