I have a problem retrieving data from the Firebase Realtime Database: java.lang.ClassCastException: java.lang.String cannot be cast to java.util.Map.
This is the error log:
E/AndroidRuntime: FATAL EXCEPTION: main
Process: root.example.com.chatrack, PID: 31556
java.lang.ClassCastException: java.lang.String cannot be cast to java.util.Map
at root.example.com.chatrack.ChatActivity.AmbilDataGroup(ChatActivity.java:156)
at root.example.com.chatrack.ChatActivity.access$200(ChatActivity.java:48)
at root.example.com.chatrack.ChatActivity$2.onDataChange(ChatActivity.java:140)
at com.google.firebase.database.core.ValueEventRegistration.fireEvent(com.google.firebase:firebase-database##16.0.6:75)
at com.google.firebase.database.core.view.DataEvent.fire(com.google.firebase:firebase-database##16.0.6:63)
at com.google.firebase.database.core.view.EventRaiser$1.run(com.google.firebase:firebase-database##16.0.6:55)
at android.os.Handler.handleCallback(Handler.java:794)
at android.os.Handler.dispatchMessage(Handler.java:99)
at android.os.Looper.loop(Looper.java:176)
at android.app.ActivityThread.main(ActivityThread.java:6635)
at java.lang.reflect.Method.invoke(Native Method)
I don't know what went wrong. This is the code I'm using to retrieve the data:
DatabaseReference add = mFirebaseDatabase.getReference().child("CHATRACK").child("USER").child(Child);
Log.d(TAG, "getFriendsInfo() returned: " + add);
add.addValueEventListener(new ValueEventListener() {
@Override
public void onDataChange(DataSnapshot dataSnapshot) {
Log.d(TAG, "onDataChange() returned: " + dataSnapshot);
Log.d(TAG, "onDataChange() returned: " + Child.size());
AmbilDataGroup((Map<String, Object>) dataSnapshot.getValue());
}
@Override
public void onCancelled(DatabaseError databaseError) {
}
});
private void AmbilDataGroup(Map<String, Object> dataSnapshot) {
final ArrayList<String> Nama = new ArrayList<>();
for (Map.Entry<String, Object> entry : dataSnapshot.entrySet())
{
Map nama= (Map) entry.getValue();
Nama.add((String) nama.get("Nama"));
}
}
Why does this happen? In another activity the same code works fine with no problem. I don't understand; please help.
Edit: here is my database structure:
[screenshot of the database structure]
String value = "{first_name = naresh,last_name = kumar,gender = male}";
value = value.substring(1, value.length()-1); //remove curly brackets
String[] keyValuePairs = value.split(","); //split the string to create key-value pairs
Map<String,String> map = new HashMap<>();
for(String pair : keyValuePairs) //iterate over the pairs
{
String[] entry = pair.split("="); //split the pairs to get key and value
map.put(entry[0].trim(), entry[1].trim()); //add them to the hashmap and trim whitespaces
}
Add this code in your listener:
DatabaseReference add = mFirebaseDatabase.getReference().child("CHATRACK").child("USER").child(Child);
Log.d(TAG, "getFriendsInfo() returned: " + add);
add.addValueEventListener(new ValueEventListener() {
@Override
public void onDataChange(DataSnapshot dataSnapshot) {
Log.d(TAG, "onDataChange() returned: " + dataSnapshot);
Log.d(TAG, "onDataChange() returned: " + Child.size());
String value = String.valueOf(dataSnapshot.getValue()); // getValue() returns Object, so convert it to a String first
value = value.substring(1, value.length()-1); //remove curly brackets
String[] keyValuePairs = value.split(","); //split the string to create key-value pairs
Map<String, Object> map = new HashMap<>(); // Object values so the map can be passed to AmbilDataGroup below
for(String pair : keyValuePairs) //iterate over the pairs
{
String[] entry = pair.split("="); //split the pairs to get key and value
map.put(entry[0].trim(), entry[1].trim()); //add them to the hashmap and trim whitespaces
}
AmbilDataGroup(map);
}
@Override
public void onCancelled(DatabaseError databaseError) {
}
});
private void AmbilDataGroup(Map<String, Object> dataSnapshot) {
final ArrayList<String> NamaGroup = new ArrayList<>();
for (Map.Entry<String, Object> entry : dataSnapshot.entrySet()) {
Map namaGroup = (Map) entry.getValue();
NamaGroup.add((String) namaGroup.get("GroupName"));
}
final ArrayList<String> GroupMember = new ArrayList<>();
for (Map.Entry<String, Object> entry : dataSnapshot.entrySet()) {
Map groupMember = (Map) entry.getValue();
GroupMember.add((String) groupMember.get("GroupMember"));
}
final ArrayList<String> GroupId = new ArrayList<>();
for (Map.Entry<String, Object> entry : dataSnapshot.entrySet()) {
Map groupId = (Map) entry.getValue();
GroupId.add((String) groupId.get("GroupId"));
}
String[] member = GroupMember.toString().split(",");
String memberId;
if (GroupId != null) {
int i = 0;
while (GroupId.size() > i) {
memberId = GroupMember.get(i).replace("[", "").replace("]", "");
Log.d(TAG, "AmbilDataGroup() returned: " + GroupMember.get(i).split(","));
if (memberId.contains(UserId)) {
}
Log.d(TAG, "AmbilDataGroup() returned: int i" + i);
i++;
}
}
}
Related
In a for-each loop, inside entry.getValue(), there is another map coming from Firestore. How can I get it?
Code:
@Override
public void onComplete(@NonNull Task<QuerySnapshot> task) {
if (task.isSuccessful()) {
// binding.contentMain.noData.setVisibility(View.GONE);
for (QueryDocumentSnapshot document : Objects.requireNonNull(task.getResult())) {
showLog("Data: " + document.getData());
postMap.putAll(document.getData());
}
try {
for (Map.Entry<String, Object> entry : postMap.entrySet()) {
if (entry.getKey().equals(CONTENT)) {
showLog("value: " + entry.getValue().toString());
contentMap = new HashMap<>();
contentMap.putAll(entry.getValue());
}
}
} catch (Exception e) {
e.printStackTrace();
}
} else {
// binding.contentMain.noData.setVisibility(View.VISIBLE);
showLog("Error getting documents: " + task.getException());
}
}
I tried like below but got a compiler error and no suggestion:
Map map = new HashMap();
((Map)map.get( "keyname" )).get( "nestedkeyname" );
Try this:
contentMap.putAll((Map<? extends String, ? extends Object>) entry.getValue());
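For context, here is a minimal sketch of where that cast would go in the loop from the question; it assumes the value stored under CONTENT really is a nested map, otherwise the cast itself throws a ClassCastException.
for (Map.Entry<String, Object> entry : postMap.entrySet()) {
    if (entry.getKey().equals(CONTENT)) {
        // entry.getValue() is declared as Object; the explicit cast lets putAll accept it
        contentMap = new HashMap<>();
        contentMap.putAll((Map<? extends String, ? extends Object>) entry.getValue());
    }
}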
Using your variable naming, try this:
entry.iterator().next()
So if you have HashMap A:
{"key1":"value1",
"key2":"value2",
"key3":"value3"}
given the entry set of this HashMap, "entrySet", you could do
entrySet.iterator().next()
which, if used in a while loop, can iterate through all the keys and values in the HashMap.
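For illustration, a minimal sketch of that iteration, assuming a plain java.util.HashMap<String, String> like HashMap A above:
Map<String, String> a = new HashMap<>();   // java.util.Map, java.util.HashMap
a.put("key1", "value1");
a.put("key2", "value2");
a.put("key3", "value3");
Iterator<Map.Entry<String, String>> it = a.entrySet().iterator();  // java.util.Iterator
while (it.hasNext()) {
    Map.Entry<String, String> entry = it.next();  // next() advances through the entry set
    System.out.println(entry.getKey() + " = " + entry.getValue());
}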
I have 3 items in my table, and I also use a HashMap. So my table looks like this:
WIFI_TABLE
position_id | routers (ssid, mac_id) | values (mac_id, rssi)
A           | AP1, 23:xx:xx:xx       | 23:xx:xx:xx, -102
B           | AP2, 12:xx:xx:xx       | 12:xx:xx:xx, -98
C           | AP3, 9a:xx:xx:xx       | 9a:xx:xx:xx, -100
I'm scanning new data, also into a HashMap, and it looks like this:
SCAN RESULT
position_id | routers (ssid, mac_id) | values (mac_id, rssi)
UNKNOWN     | AP1, 23:xx:xx:xx       | 23:xx:xx:xx, -102
            | AP2, 12:xx:xx:xx       | 12:xx:xx:xx, -98
            | AP3, 9a:xx:xx:xx       | 9a:xx:xx:xx, -100
            | AP4, 76:xx:xx:xx       | 76:xx:xx:xx, -108
            | AP5, b8:xx:xx:xx       | b8:xx:xx:xx, -80
As you can see, the scan result contains all surrounding data. I want to calculate only the matching data.
First, get the data from the database. Then, if a mac_id in the scan result is equal to a mac_id in the database, calculate scanned rssi - database rssi. For example, scanned mac_id = 23:xx:xx:xx matches database AP1, so result = (-98) - (-102). Then take the second matching entry and calculate result2, and the third to get result3.
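Not the asker's final code, but a minimal sketch of that calculation, assuming both readings are available as HashMap<String, Integer> keyed by mac_id (which is what PositionData.getValues() below returns), reusing the positionData and positionsData variables from the activity code further down:
HashMap<String, Integer> scanned = positionData.getValues();          // new scan result
HashMap<String, Integer> stored  = positionsData.get(0).getValues();  // reading from the database
for (Map.Entry<String, Integer> entry : scanned.entrySet()) {
    String macId = entry.getKey();
    if (stored.containsKey(macId)) {                        // only compare mac_ids present in both
        int result = entry.getValue() - stored.get(macId);  // scanned rssi - database rssi
        Log.v("rssi-diff", macId + " -> " + result);
    }
}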
This is the PositionData class:
public class PositionData implements Serializable {
private String name;
public HashMap<String, Integer> values;
public HashMap<String,String> routers;
public PositionData(String name) {
// TODO Auto-generated constructor stub
this.name=name;
values = new HashMap<String, Integer>();
routers = new HashMap<String, String>();
}
public void addValue(Router router,int strength){
values.put(router.getBSSID(), strength);
routers.put(router.getBSSID(),router.getSSID());
}
public String getName() {
return name;
}
public String toString() {
String result="";
result+=name+"\n";
for(Map.Entry<String, Integer> e: this.values.entrySet())
result+=routers.get(e.getKey())+" : "+e.getValue().toString()+"\n";
return result;
}
public HashMap<String, Integer> getValues() {
return values;
}
public HashMap<String, String> getRouters() {
return routers;
}
}
And this is the activity class:
PositionData positionData = (PositionData) intent
.getSerializableExtra("PositionData");
positionsData=db.getReadings(building);
HashMap<String, Integer> rssi = positionData.getValues();
HashMap<String, Integer> rssi1 = positionsData.get(0).getValues();
HashMap<String, String> dest = positionsData.get(0).getRouters();
int dista = 0;
if (positionData.equals(dest)){
dista = Integer.parseInt(String.valueOf(rssi))-Integer.parseInt(String.valueOf(rssi1));
}
Log.v("dis:", String.valueOf(dista));
Also, this is the getReadings method from the database helper class:
public ArrayList<PositionData> getReadings(String building_id) {
HashMap<String, PositionData> positions = new HashMap<String, PositionData>();
SQLiteDatabase db = getReadableDatabase();
Cursor cursor = db.rawQuery("select distinct * from " + READINGS_TABLE
+ " where building_id='" + building_id + "'", null);
cursor.moveToFirst();
while (cursor.isAfterLast() == false) {
String position_id = cursor.getString(1);
Router router = new Router(cursor.getString(2), cursor.getString(3));
Log.v(cursor.getString(2), cursor.getInt(4) + "");
if (positions.containsKey(position_id)) {
positions.get(position_id).addValue(router, cursor.getInt(4));
} else {
PositionData positionData = new PositionData(
cursor.getString(1));
positionData.addValue(router, cursor.getInt(4));
positions.put(position_id, positionData);
}
cursor.moveToNext();
}
System.out.println("Reading done");
ArrayList<PositionData> result = new ArrayList<PositionData>();
for (Map.Entry<String, PositionData> e : positions.entrySet())
result.add(e.getValue());
return result;
}
I have no idea how I can render this JSON in Logcat.
This is the source code to get data from the Firebase Realtime Database:
FirebaseDatabase database = FirebaseDatabase.getInstance();
DatabaseReference myRef = database.getReference("hotel");
myRef.addValueEventListener(new ValueEventListener() {
@Override
public void onDataChange(DataSnapshot dataSnapshot) {
String value = dataSnapshot.getValue(String.class);
Log.d(TAG, "Value is: " + value);
}
@Override
public void onCancelled(DatabaseError error) {
Log.w(TAG, "Failed to read value.", error.toException());
}
});
It throws an error like this:
com.google.firebase.database.DatabaseException: Failed to convert value of type java.util.HashMap to String
I've looked for a tutorial, but I can't find one. I also tried this:
HashMap value = dataSnapshot.getValue(HashMap.class);
It ended up with an error as well. How should I render this HashMap data?
You are getting a HashMap with String keys and Object values, so map it into something like below:
myRef.addValueEventListener(new ValueEventListener() {
@Override
public void onDataChange(DataSnapshot dataSnapshot) {
Map<String, Object> map = (Map<String, Object>) dataSnapshot.getValue();
Log.d(TAG, "Value is: " + map);
}
@Override
public void onCancelled(DatabaseError error) {
Log.w(TAG, "Failed to read value.", error.toException());
}
});
Or we can get this parsed into a model class instead of a raw map, for example as sketched below.
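For illustration, a rough sketch of that model approach; the Hotel class and its address/name fields are assumptions based on the fields read elsewhere in this thread, not a confirmed schema.
// Hypothetical model class; Firebase needs a public no-argument constructor
// and public fields (or getters) matching the keys stored in the database.
public class Hotel {
    public String address;
    public String name;

    public Hotel() {
    }
}
// Then, inside onDataChange, each child of the snapshot can be deserialized directly:
for (DataSnapshot ds : dataSnapshot.getChildren()) {
    Hotel hotel = ds.getValue(Hotel.class);
    Log.d(TAG, "Hotel: " + hotel.name + " / " + hotel.address);
}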
Update
Use this to parse:
Map<String, Object> map = (Map<String, Object>) dataSnapshot.getValue();
Assuming that the hotel node is a direct child of your Firebase root, to get those values, please use the following code
DatabaseReference rootRef = FirebaseDatabase.getInstance().getReference();
DatabaseReference hotelRef = rootRef.child("hotel");
ValueEventListener eventListener = new ValueEventListener() {
@Override
public void onDataChange(DataSnapshot dataSnapshot) {
for(DataSnapshot ds : dataSnapshot.getChildren()) {
String address = ds.child("address").getValue(String.class);
String name = ds.child("name").getValue(String.class);
Log.d("TAG", address + " / " + name);
}
}
@Override
public void onCancelled(DatabaseError databaseError) {}
};
hotelRef.addListenerForSingleValueEvent(eventListener);
The output will be:
Surubaya / Hotel Suridan
Bandung / Hotel Santika
//and so on
In Google Cloud Dataflow, my join fails with "TupleTag Tag corresponds to a non-singleton result". From the error stack it seems this is happening in the overridden processElement method that reads the CoGbkResult.
String Ad_ID = e.getKey();
String Ad_Info = "none";
Ad_Info = e.getValue().getOnly(AdInfoTag);
Following is my join method.
static PCollection<String> joinEvents(PCollection<TableRow> ImpressionTable,
PCollection<TableRow> AdTable) throws Exception {
final TupleTag<String> ImpressionInfoTag = new TupleTag<String>();
final TupleTag<String> AdInfoTag = new TupleTag<String>();
// transform both input collections to tuple collections, where the keys are Ad_ID
PCollection<KV<String, String>> ImpressionInfo = ImpressionTable.apply(
ParDo.of(new ExtractImpressionDataInfoFn()));
PCollection<KV<String, String>> AdInfo = AdTable.apply(
ParDo.of(new ExtractAdDataInfoFn()));
// Ad_ID 'key' -> CGBKR (<ImpressionInfo>, <AdInfo>)
PCollection<KV<String, CoGbkResult>> kvpCollection = KeyedPCollectionTuple
.of(ImpressionInfoTag, ImpressionInfo)
.and(AdInfoTag, AdInfo)
.apply(CoGroupByKey.<String>create());
// Process the CoGbkResult elements generated by the CoGroupByKey transform.
// Ad_ID 'key' -> string of <Impressioninfo>, <Adinfo>
PCollection<KV<String, String>> finalResultCollection =
kvpCollection.apply(ParDo.named("Process").of(
new DoFn<KV<String, CoGbkResult>, KV<String, String>>() {
private static final long serialVersionUID = 1L;
@Override
public void processElement(ProcessContext c) {
KV<String, CoGbkResult> e = c.element();
String Ad_ID = e.getKey();
String Ad_Info = "none";
Ad_Info = e.getValue().getOnly(AdInfoTag);
for (String eventInfo : c.element().getValue().getAll(ImpressionInfoTag)) {
// Generate a string that combines information from both collection values
c.output(KV.of(Ad_ID, " " + Ad_Info
+ " " + eventInfo));
}
}
}));
//write to GCS
PCollection<String> formattedResults = finalResultCollection
.apply(ParDo.named("Format").of(new DoFn<KV<String, String>, String>() {
@Override
public void processElement(ProcessContext c) {
String outputstring = "AdUnitID: " + c.element().getKey()
+ ", " + c.element().getValue();
c.output(outputstring);
}
}));
return formattedResults;
}
My ExtractImpressionDataInfoFn and ExtractAdDataInfoFn classes are below.
static class ExtractImpressionDataInfoFn extends DoFn<TableRow, KV<String, String>> {
private static final long serialVersionUID = 1L;
@Override
public void processElement(ProcessContext c) {
TableRow row = c.element();
String Ad_ID = (String) row.get("AdUnitID");
String User_ID = (String) row.get("UserID");
String Client_ID = (String) row.get("ClientID");
String Impr_Time = (String) row.get("GfpActivityAdEventTIme");
String ImprInfo = "UserID: " + User_ID + ", ClientID: " + Client_ID + ", GfpActivityAdEventTIme: " + Impr_Time;
c.output(KV.of(Ad_ID, ImprInfo));
}
}
static class ExtractAdDataInfoFn extends DoFn<TableRow, KV<String, String>> {
private static final long serialVersionUID = 1L;
@Override
public void processElement(ProcessContext c) {
TableRow row = c.element();
String Ad_ID = (String) row.get("AdUnitID");
String Content_ID = (String) row.get("ContentID");
String Pub_ID = (String) row.get("Publisher");
String Add_Info = "ContentID: " + Content_ID + ", Publisher: " + Pub_ID;
c.output(KV.of(Ad_ID, Add_Info));
}
}
The schemas for Impression and Ad are below:
Impression:
AdUnitID
UserID
ClientID
GfpActivityAdEventTIme
Ad:
AdUnitID
ClientID
Publisher
That error suggests that when you call getOnly, the CoGroupByKey had more than one result for that tag. Specifically, this line:
Ad_Info = e.getValue().getOnly(AdInfoTag);
If you change that to getAll(AdInfoTag) and iterate over the results, it should work.
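For illustration, a minimal sketch of the processElement body rewritten with getAll, reusing the tags and variables from the question; since a key may have zero or many AdInfo values, iterating avoids the non-singleton error:
@Override
public void processElement(ProcessContext c) {
    KV<String, CoGbkResult> e = c.element();
    String Ad_ID = e.getKey();
    // getAll returns an Iterable, so every AdInfo value for this key is visited
    for (String adInfo : e.getValue().getAll(AdInfoTag)) {
        for (String eventInfo : e.getValue().getAll(ImpressionInfoTag)) {
            c.output(KV.of(Ad_ID, " " + adInfo + " " + eventInfo));
        }
    }
}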
I am trying to run a reduce-side join on 3 datasets. Unfortunately, I keep getting an ArrayIndexOutOfBoundsException. I have tried to handle it with a try/catch, but to no avail. Can someone please recommend a solution?
package Joins;
import java.io.IOException;
import java.util.*;
import java.util.Map.Entry;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Reducer;
public class JoinReducer extends Reducer<Text, Text, Text, Text>
{
Map<String, String> divStkJoin = new HashMap<String, String>();
Map<String, String> divStkMetaJoin = new HashMap<String, String>();
Map<String, String> stockData = new HashMap<String, String>();
Map<String, String> metaData = new HashMap<String, String>();
Map<String, String> divData = new HashMap<String, String>();
Text k = new Text();
Text v = new Text();
public void setup(Context context)
{
metaData.clear();
divData.clear();
stockData.clear();
divStkJoin.clear();
divStkMetaJoin.clear();
}
public void reduce(Text keys, Iterable<Text> values, Context context)
{
Iterator it = values.iterator();
while(it.hasNext()){
String [] keyLine = keys.toString().split(",");
String valueLine = values.toString();
try {
if(keyLine[4].equals("_s"))
{
String keyLineStock = keyLine[0] + "," + keyLine[1] +"," + keyLine[2] + "," + keyLine[3];
stockData.put(keyLineStock, valueLine);
}
else if(keyLine[4].equals("_d"))
{
String keyLineDiv = keyLine[0] + "," + keyLine[1] +"," + keyLine[2] + "," + keyLine[3];
divData.put(keyLineDiv, valueLine);
}
else if (keyLine[1].equals("_m"))
{
String keyLineMeta = keyLine[0];
metaData.put(keyLineMeta, valueLine);
}
else
return;
} catch (ArrayIndexOutOfBoundsException e){return;}
}
//JOINS
for(Entry<String, String> entryStock: stockData.entrySet())
for(Entry<String, String> entryDiv: divData.entrySet())
{
if(entryStock.getKey().equals(entryDiv.getKey()))
{
divStkJoin.put(entryStock.getKey(), entryStock.getValue()+ ","+ entryDiv.getValue());
}
}
for(Entry<String, String> entrydivStkJoin: divStkJoin.entrySet())
{
String [] entrydivStkJoinKeyArr = entrydivStkJoin.getKey().toString().split(",");
for(Entry<String, String> meta: metaData.entrySet())
{
String [] metaArr = meta.getKey().split(",");
if(metaArr[0].equals(entrydivStkJoinKeyArr[1]))
{
divStkMetaJoin.put(entrydivStkJoin.toString(), meta.getValue());
}
}
}
}
public void cleanup(Context context) throws IOException, InterruptedException
{
String keyJ;
String valJ;
for(Map.Entry<String, String> entry : divStkMetaJoin.entrySet())
{
keyJ=entry.getKey();
valJ=entry.getValue();
Text k = new Text(keyJ);
Text v = new Text(valJ);
context.write(k, v);
}
}
}
I think the error comes from this line:
if(keyLine[4].equals("_s"))
My solution would be checking whether keyLine is null or has fewer than 5 elements, since index 4 is accessed:
if (keyLine == null || keyLine.length < 5) {
    return;
}
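For context, a sketch of where that guard would sit inside the reduce loop from the question; with the guard in place, the try/catch around the index access is no longer needed:
// inside the while (it.hasNext()) loop of the reduce method above
String[] keyLine = keys.toString().split(",");
if (keyLine == null || keyLine.length < 5) {  // keyLine[4] is read below, so at least 5 fields are required
    return;                                   // same early exit the catch block performed
}
if (keyLine[4].equals("_s")) {
    // ... handle stock rows as before
}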