summaryrefslogtreecommitdiffstats
path: root/vespa-hadoop/src/test/java/com/yahoo/vespa/hadoop/pig/VespaQueryTest.java
blob: b0e2dd32c0414491e94bf471211e4f9031702084 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hadoop.pig;

import com.sun.net.httpserver.HttpServer;
import com.yahoo.vespa.hadoop.util.MockQueryHandler;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.data.Tuple;
import org.junit.jupiter.api.Test;

import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class VespaQueryTest {

    @Test
    public void requireThatQueriesAreReturnedCorrectly() throws Exception {
        runQueryTest("src/test/pig/query.pig", createQueryHandler(""), 18901);
    }

    @Test
    public void requireThatQueriesAreReturnedCorrectlyWithAlternativeJsonRoot() throws Exception {
        runQueryTest("src/test/pig/query_alt_root.pig", createQueryHandler("children"), 18902);
    }

    /**
     * Runs the given Pig script against a local mock query server and checks
     * every tuple in the "recommendations" relation against the canned hits
     * registered on the handler.
     *
     * @param script       path to the Pig script under test
     * @param queryHandler mock HTTP handler serving the canned query responses
     * @param port         local port to bind the mock server to (each test uses
     *                     its own port so the two tests cannot collide)
     * @throws Exception on any server, Pig, or assertion failure
     */
    private void runQueryTest(String script, MockQueryHandler queryHandler, int port) throws Exception {
        final String endpoint = "http://localhost:" + port;

        HttpServer server = HttpServer.create(new InetSocketAddress(port), 0);
        server.createContext("/", queryHandler);
        server.start();

        // Stop the server in a finally block: the original code only stopped it
        // on the success path, leaking the bound port when an assertion failed.
        try {
            PigServer ps = setup(script, endpoint);

            int verifiedHits = 0;
            Iterator<Tuple> recommendations = ps.openIterator("recommendations");
            while (recommendations.hasNext()) {
                Tuple tuple = recommendations.next();

                // Tuple layout produced by the scripts:
                // (userid, rank, docid, relevance, fieldId, fieldContent)
                String userid = (String) tuple.get(0);
                Integer rank = (Integer) tuple.get(1);
                String docid = (String) tuple.get(2);
                Double relevance = (Double) tuple.get(3);
                String fieldId = (String) tuple.get(4);
                String fieldContent = (String) tuple.get(5);

                MockQueryHandler.MockQueryHit hit = queryHandler.getHit(userid, rank);
                assertNotNull(hit, "No mock hit registered for user " + userid + " at rank " + rank);

                // JUnit convention: expected (mock) value first, actual (Pig) value second,
                // so failure messages read correctly.
                assertEquals(hit.id, docid);
                assertEquals(hit.relevance, relevance, 1e-3);
                assertEquals(hit.fieldId, fieldId);
                assertEquals(hit.fieldContent, fieldContent);

                verifiedHits++;
            }

            // Guard against a vacuous pass: an empty iterator previously let the
            // test succeed without verifying anything.
            assertTrue(verifiedHits > 0, "Query returned no tuples - nothing was verified");
        } finally {
            server.stop(0);
        }
    }

    /**
     * Creates a local-mode PigServer with the script registered in batch mode,
     * substituting the ENDPOINT parameter with the mock server's address.
     */
    private PigServer setup(String script, String endpoint) throws Exception {
        Configuration conf = new HdfsConfiguration();
        Map<String, String> parameters = new HashMap<>();
        parameters.put("ENDPOINT", endpoint);

        PigServer ps = new PigServer(ExecType.LOCAL, conf);
        ps.setBatchOn();
        ps.registerScript(script, parameters);

        return ps;
    }

    /**
     * Builds a mock handler with three hits for each of three users.
     * Relevance decreases by 0.1 per rank within a user so that rank order
     * is deterministic; ids are globally unique across users.
     *
     * @param childNode JSON node under which hits are nested in the mock
     *                  response ("" for the default root)
     */
    private MockQueryHandler createQueryHandler(String childNode) {
        MockQueryHandler queryHandler = new MockQueryHandler(childNode);

        List<String> userIds = Arrays.asList("5", "104", "313");

        int hitsPerUser = 3;
        for (int i = 0; i < hitsPerUser * userIds.size(); ++i) {
            String id = "" + (i+1);
            String userId = userIds.get(i / hitsPerUser);
            queryHandler.newHit().
                    setId("id::::" + id).
                    setRelevance(1.0 - (i % hitsPerUser) * 0.1).
                    setFieldSddocname("doctype").
                    setFieldId("" + id).
                    setFieldDate("2016060" + id).
                    setFieldContent("Content for user " + userId + " hit " + i % hitsPerUser + "...").
                    add(userId);
        }

        return queryHandler;
    }

}