I want to send money between different PaypalCustomerAccounts, and each user's transactions are recorded in the UserTransaction table. In the example screenshot, the PaypalCustomerAccount has user_id rmMe7a68kOXIvblu4aah1ZHc7Qx2. When she makes a transaction, it is saved to the UserTransaction table under her user_id with a new transaction_id, -LOvXSpmHnjmN2sWkhap. (screenshot: Firebase database structure)
I want to use a DialogFlow chatbot integrated into my Android Studio app to send the money between the PaypalCustomerAccounts.
The screenshots show what the chatbot looks like in the Android app. When a user wants to send a "whoosh" (a digital cheque), the conversation goes like this (the parameters the intent collects are sketched after the transcript):
- Send a whoosh
- Who would you like to send it to?
- Send it to Susie
- May I know the posting date of the cheque?
- Send it tmr
- OK, I have sent the whoosh to Susie. It will be posted by 17 October 2018.
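When that intent is matched, Dialogflow passes the values it collected to the fulfillment as parameters. Assuming the intent defines a name parameter and a date parameter (which is what the prompts above suggest), the webhook request would carry roughly:

{
  "queryResult": {
    "parameters": {
      "name": "Susie",
      "date": "2018-10-17"
    }
  }
}

and these are what agent.parameters reads in the fulfillment code further down.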
Now, after the user makes this request, I want to save the new transaction to my Firebase, from PaypalCustomerAccount rmMe7a68kOXIvblu4aah1ZHc7Qx2 to a2u4aqw3Hc7Qx2. This means a new transaction record should appear under UserTransaction for each user (sketched below):
- For a2u4aqw3Hc7Qx2, the status will be "receive".
- For rmMe7a68kOXIvblu4aah1ZHc7Qx2, the status will be "send".
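Roughly the shape I have in mind (the field names here are only illustrative):

{
  "UserTransaction": {
    "rmMe7a68kOXIvblu4aah1ZHc7Qx2": {
      "-LOvXSpmHnjmN2sWkhap": {
        "status": "send",
        "counterparty": "a2u4aqw3Hc7Qx2",
        "postDate": "2018-10-17"
      }
    },
    "a2u4aqw3Hc7Qx2": {
      "-LOvXSpmHnjmN2sWkhap": {
        "status": "receive",
        "counterparty": "rmMe7a68kOXIvblu4aah1ZHc7Qx2",
        "postDate": "2018-10-17"
      }
    }
  }
}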
Below is the index.js code from my Dialogflow fulfillment page. I don't know how to write the code to make the above happen (i.e., to make the new transaction records appear under UserTransaction). Should I write that code in the Dialogflow fulfillment, or in my Android Studio project (the backend of the chatbot code)?
'use strict';

const functions = require('firebase-functions');
const {WebhookClient} = require('dialogflow-fulfillment');
const {Card, Suggestion} = require('dialogflow-fulfillment');

// initialise DB connection
const admin = require('firebase-admin');
admin.initializeApp({
    credential: admin.credential.applicationDefault(),
    databaseURL: 'ws://whooshapplication.firebaseio.com/',
});

process.env.DEBUG = 'dialogflow:debug'; // enables lib debugging statements

exports.dialogflowFirebaseFulfillment = functions.https.onRequest((request, response) => {
    const agent = new WebhookClient({ request, response });
    console.log('Dialogflow Request headers: ' + JSON.stringify(request.headers));
    console.log('Dialogflow Request body: ' + JSON.stringify(request.body));

    function welcome(agent) {
        agent.add(`Welcome to my agent!`);
    }

    function SendWhoosh(agent)
    {
        const nameParam = agent.parameters.name;
        const context = agent.getContext('send_a_whoosh');
        const name = nameParam || context.parameters.name;
        // push input into db
    }

    function fallback(agent) {
        agent.add(`I didn't understand`);
        agent.add(`I'm sorry, can you try again?`);
    }

    // // Uncomment and edit to make your own intent handler
    // // uncomment `intentMap.set('your intent name here', yourFunctionHandler);`
    // // below to get this function to be run when a Dialogflow intent is matched
    // function yourFunctionHandler(agent) {
    //     agent.add(`This message is from Dialogflow's Cloud Functions for Firebase editor!`);
    //     agent.add(new Card({
    //         title: `Title: this is a card title`,
    //         imageUrl: 'https://developers.google.com/actions/images/badges/XPM_BADGING_GoogleAssistant_VER.png',
    //         text: `This is the body text of a card. You can even use line breaks and emoji! 💁`,
    //         buttonText: 'This is a button',
    //         buttonUrl: 'https://assistant.google.com/'
    //     }));
    //     agent.add(new Suggestion(`Quick Reply`));
    //     agent.add(new Suggestion(`Suggestion`));
    //     agent.setContext({ name: 'weather', lifespan: 2, parameters: { city: 'Rome' }});
    // }

    // // Uncomment and edit to make your own Google Assistant intent handler
    // // uncomment `intentMap.set('your intent name here', googleAssistantHandler);`
    // // below to get this function to be run when a Dialogflow intent is matched
    // function googleAssistantHandler(agent) {
    //     let conv = agent.conv(); // Get Actions on Google library conv instance
    //     conv.ask('Hello from the Actions on Google client library!') // Use Actions on Google library
    //     agent.add(conv); // Add Actions on Google library responses to your agent's response
    // }
    // // See https://github.com/dialogflow/dialogflow-fulfillment-nodejs/tree/master/samples/actions-on-google
    // // for a complete Dialogflow fulfillment library Actions on Google client library v2 integration sample

    // Run the proper function handler based on the matched Dialogflow intent name
    let intentMap = new Map();
    intentMap.set('Default Welcome Intent', welcome);
    intentMap.set('Default Fallback Intent', fallback);
    // intentMap.set('your intent name here', yourFunctionHandler);
    // intentMap.set('your intent name here', googleAssistantHandler);
    agent.handleRequest(intentMap);
});
Here is the ChatBot.java code from my Android Studio project, where the chatbot does its work:
public class ChatBot extends AppCompatActivity implements AIListener
{
    public Bot bot;
    public static Chat chat;
    private ListView mListView;
    private Button mButtonSend;
    private EditText mEditTextMessage;
    private Button mImageView;
    private ChatMessageAdapter mAdapter;
    AIService aiService;

    private static final String TAG = "ChatBot";

    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_chat_bot);
        mListView = (ListView) findViewById(R.id.listView);
        mImageView = (Button) findViewById(R.id.voice_record);
        mButtonSend = (Button) findViewById(R.id.btn_send);
        mEditTextMessage = (EditText) findViewById(R.id.et_message);
        mAdapter = new ChatMessageAdapter(this, new ArrayList<ChatMessage>());
        mListView.setAdapter(mAdapter);

        int permission = ContextCompat.checkSelfPermission(this,
                Manifest.permission.RECORD_AUDIO);
        if (permission != PackageManager.PERMISSION_GRANTED)
        {
            Log.i(TAG, "Permission to record denied");
            makeRequest();
        }

        final AIConfiguration config = new AIConfiguration("c43d5450b1a54959a44158fb897f1dcb",
                AIConfiguration.SupportedLanguages.English,
                AIConfiguration.RecognitionEngine.System);
        aiService = AIService.getService(this, config);
        aiService.setListener(this);
        mButtonSend.setOnClickListener(new View.OnClickListener()
        {
            @Override
            public void onClick(View v)
            {
                String message = mEditTextMessage.getText().toString();
                if (TextUtils.isEmpty(message))
                {
                    return;
                }
                // only query the AIML bot once we know the input is non-empty
                String response = chat.multisentenceRespond(message);
                sendMessage(message);
                mimicOtherMessage(response);
                mEditTextMessage.setText("");
                mListView.setSelection(mAdapter.getCount() - 1);
            }
        });

        mImageView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                aiService.startListening();
            }
        });
        // *************************************
        // checking SD card availability
        boolean a = isSDCARDAvailable();
        // receiving the assets from the app directory
        AssetManager assets = getResources().getAssets();
        File jayDir = new File(Environment.getExternalStorageDirectory().toString() + "/cheque/bots/whoosh");
        boolean b = jayDir.mkdirs();
        if (jayDir.exists())
        {
            // reading the files, skipping any that have already been copied
            try {
                for (String dir : assets.list("whoosh")) {
                    File subdir = new File(jayDir.getPath() + "/" + dir);
                    boolean subdir_check = subdir.mkdirs();
                    for (String file : assets.list("whoosh/" + dir)) {
                        File f = new File(jayDir.getPath() + "/" + dir + "/" + file);
                        if (f.exists())
                        {
                            continue;
                        }
                        InputStream in = assets.open("whoosh/" + dir + "/" + file);
                        OutputStream out = new FileOutputStream(jayDir.getPath() + "/" + dir + "/" + file);
                        // copy file from assets to the mobile's SD card or any secondary memory
                        copyFile(in, out);
                        in.close();
                        out.flush();
                        out.close();
                    }
                }
            } catch (IOException e)
            {
                e.printStackTrace();
            }
        }

        // get the working directory
        MagicStrings.root_path = Environment.getExternalStorageDirectory().toString() + "/cheque";
        System.out.println("Working Directory = " + MagicStrings.root_path);
        AIMLProcessor.extension = new PCAIMLProcessorExtension();
        // assign the AIML files to the bot for processing
        bot = new Bot("whoosh", MagicStrings.root_path, "chat");
        chat = new Chat(bot);
        String[] args = null;
        mainFunction(args);
        // *************************************
    } // onCreate
    protected void makeRequest()
    {
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.RECORD_AUDIO},
                101);
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults)
    {
        switch (requestCode)
        {
            case 101:
            {
                if (grantResults.length == 0 || grantResults[0] != PackageManager.PERMISSION_GRANTED)
                {
                    Log.i(TAG, "Permission has been denied by user");
                }
                else
                {
                    Log.i(TAG, "Permission has been granted by user");
                }
                return;
            }
        }
    }

    public void voiceclick(View view)
    {
        aiService.startListening();
    }

    // check SD card availability
    public static boolean isSDCARDAvailable()
    {
        return Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED);
    }

    // copying the file
    private void copyFile(InputStream in, OutputStream out) throws IOException
    {
        byte[] buffer = new byte[1024];
        int read;
        while ((read = in.read(buffer)) != -1)
        {
            out.write(buffer, 0, read);
        }
    } // copyFile
    // request and response of user and the bot
    public static void mainFunction(String[] args)
    {
        MagicBooleans.trace_mode = false;
        System.out.println("trace mode = " + MagicBooleans.trace_mode);
        Graphmaster.enableShortCuts = true;
        Timer timer = new Timer();
        String request = "Hello.";
        String response = chat.multisentenceRespond(request);
        System.out.println("Human: " + request);
        System.out.println("Robot: " + response);
    } // mainFunction

    private void sendMessage(String message)
    {
        ChatMessage chatMessage = new ChatMessage(message, true, false);
        mAdapter.add(chatMessage);
        // respond as HelloWorld
        mimicOtherMessage("HelloWorld");
    } // sendMessage

    private void mimicOtherMessage(String message)
    {
        ChatMessage chatMessage = new ChatMessage(message, false, false);
        mAdapter.add(chatMessage);
    }

    private void sendMessage()
    {
        ChatMessage chatMessage = new ChatMessage(null, true, true);
        mAdapter.add(chatMessage);
        mimicOtherMessage();
    }

    private void mimicOtherMessage()
    {
        ChatMessage chatMessage = new ChatMessage(null, false, true);
        mAdapter.add(chatMessage);
    }
    @Override
    public void onResult(AIResponse result)
    {
        Log.d("anu", result.toString());
        Result result1 = result.getResult();
        mEditTextMessage.setText("Query: " + result1.getResolvedQuery() + " action: " + result1.getAction());
    }

    @Override
    public void onError(AIError error) {
    }

    @Override
    public void onAudioLevel(float level) {
    }

    @Override
    public void onListeningStarted() {
    }

    @Override
    public void onListeningCanceled() {
    }

    @Override
    public void onListeningFinished() {
    }
}
I have also attached screenshots of my intents in Dialogflow.
Please help me! Thank you in advance.
First of all, awesome project you are working on: real-time transaction management through a chatbot.
I have gone through every file you attached and the question you asked. I would like to highlight some important steps you can follow to achieve what you are asking:
- Check out and implement the PayPal SDK in your Android application: https://github.com/paypal/PayPal-Android-SDK
- Then, using its methods and objects, you can take the return value of any PayPal transaction into your Android app and pass it on to Firebase's Realtime Database.
- Your chatbot code is fine as it is; you just have to trigger the right action when the user asks to send money to someone. The matched intent fires your fulfillment webhook, while the PayPal code runs in your Android class file. A sketch of the fulfillment side follows this list.
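To answer the Firebase part of your question directly: the cleanest place for the database write is the SendWhoosh handler in your fulfillment, since index.js already initialises firebase-admin. Here is a minimal sketch, assuming a UserTransaction node and illustrative field names (status, counterparty, postDate), with the receiver ID hard-coded; in real code you would resolve it from the name parameter:

function SendWhoosh(agent)
{
    const nameParam = agent.parameters.name;
    const context = agent.getContext('send_a_whoosh');
    const name = nameParam || context.parameters.name;

    // hard-coded for illustration; look the real receiver up from `name`
    const senderId = 'rmMe7a68kOXIvblu4aah1ZHc7Qx2';
    const receiverId = 'a2u4aqw3Hc7Qx2';

    const db = admin.database();
    // push() generates a fresh key of the -LOvXSpmHnjmN2sWkhap form
    const txId = db.ref('UserTransaction/' + senderId).push().key;

    // a single multi-location update writes both records atomically
    const updates = {};
    updates['UserTransaction/' + senderId + '/' + txId] =
        { status: 'send', counterparty: receiverId, postDate: '2018-10-17' };
    updates['UserTransaction/' + receiverId + '/' + txId] =
        { status: 'receive', counterparty: senderId, postDate: '2018-10-17' };

    // return the promise so the Cloud Function waits for the write before replying
    return db.ref().update(updates).then(() => {
        agent.add('Okay, I sent a whoosh to ' + name + '!');
    });
}

Two things to fix alongside this: register the handler under the exact intent name from your console, for example intentMap.set('SendWhoosh', SendWhoosh); (right now SendWhoosh is never added to the intent map), and change your databaseURL from ws://... to https://whooshapplication.firebaseio.com/. The Android app then only relays the conversation; only the PayPal SDK step lives in your Android classes.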
I hope this makes sense! To know more, feel free to raise your doubts in the reply section.
All the best with your implementation!