Skip to content

Commit eb8a5d6

Browse files
committed
测试
1 parent a83f4d6 commit eb8a5d6

File tree

4 files changed

+120
-2
lines changed

4 files changed

+120
-2
lines changed

web-crawler-multithreaded/test.cpp

Lines changed: 106 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,106 @@
1+
#include <algorithm>
#include <concepts>
#include <future>
#include <iostream>
#include <string>
#include <thread>
#include <unordered_set>
#include <vector>

#include <gtest/gtest.h>
9+
import leetcode_test.web_crawler_multithreaded.HtmlParser;
10+
import leetcode_test.web_crawler_multithreaded.Solution;
11+
using namespace std;
12+
using namespace leetcode_test::web_crawler_multithreaded;
13+
template <class T>
14+
concept sizable = requires(T& t) {
15+
{
16+
t.size()
17+
} -> std::same_as<size_t>;
18+
};
19+
template <class T>
20+
concept iterable = requires(T& t) {
21+
++t.begin();
22+
{
23+
t.begin() != t.end()
24+
} -> std::same_as<bool>;
25+
};
26+
27+
template <class T, typename Y>
28+
concept equalable = requires(T& t, Y& y, size_t i) {
29+
{
30+
*t.begin() == *y.begin()
31+
} -> std::same_as<bool>;
32+
};
33+
template <typename T, typename Y>
34+
requires sizable<T> and sizable<Y> and equalable<T, Y> and iterable<T> and iterable<Y>
35+
auto assertContentEquals(const T& left, const Y& right)
36+
{
37+
38+
ASSERT_EQ(left.size(), right.size());
39+
auto a = left.begin();
40+
auto b = right.begin();
41+
for (; b != right.end() && a != left.end(); ++a, ++b) {
42+
43+
ASSERT_EQ(*a, *b);
44+
}
45+
}
46+
// Crawl starting from news.google.com. All of its outgoing edges point at
// yahoo hosts, and the crawler only follows same-host links, so the
// expected result is the start URL alone.
TEST(web_crawler_multithreaded, main1)
{
    auto urls = vector<string> {
        "http://news.yahoo.com",
        "http://news.yahoo.com/news",
        "http://news.yahoo.com/news/topics/",
        "http://news.google.com",
    };
    auto edges = vector<pair<int, int>> { { 0, 2 }, { 2, 1 }, { 3, 2 }, { 3, 1 }, { 3, 0 } };
    auto start = string { "http://news.google.com" };
    auto expected = vector<string> { "http://news.google.com" };

    auto parser = HtmlParser { urls, edges };
    auto actual = Solution().crawl(start, parser);

    // Dump both sequences to ease diagnosis when the assertion fails.
    cout << "output" << endl;
    for (auto& url : expected) {
        cout << url << endl;
    }
    cout << "result" << endl;
    for (auto& url : actual) {
        cout << url << endl;
    }

    // Multithreaded crawl order is nondeterministic; compare sorted copies.
    std::ranges::sort(actual);
    std::ranges::sort(expected);
    assertContentEquals(actual, expected);
}
73+
// Crawl starting inside the yahoo host graph. Every yahoo page reachable
// from the start URL must be returned; the google URL is on another host
// and must be excluded.
TEST(web_crawler_multithreaded, main2)
{
    auto urls = vector<string> {
        "http://news.yahoo.com",
        "http://news.yahoo.com/news",
        "http://news.yahoo.com/news/topics/",
        "http://news.google.com",
        "http://news.yahoo.com/us",
    };
    auto edges = vector<pair<int, int>> { { 2, 0 }, { 2, 1 }, { 3, 2 }, { 3, 1 }, { 0, 4 } };
    auto start = string { "http://news.yahoo.com/news/topics/" };
    auto expected = vector<string> {
        "http://news.yahoo.com",
        "http://news.yahoo.com/news",
        "http://news.yahoo.com/news/topics/",
        "http://news.yahoo.com/us",
    };

    auto parser = HtmlParser { urls, edges };
    auto actual = Solution().crawl(start, parser);

    // Dump both sequences to ease diagnosis when the assertion fails.
    cout << "output" << endl;
    for (auto& url : expected) {
        cout << url << endl;
    }
    cout << "result" << endl;
    for (auto& url : actual) {
        cout << url << endl;
    }

    // Multithreaded crawl order is nondeterministic; compare sorted copies.
    std::ranges::sort(actual);
    std::ranges::sort(expected);
    assertContentEquals(actual, expected);
}
102+
int main(int argc, char** argv)
103+
{
104+
testing::InitGoogleTest(&argc, argv);
105+
return RUN_ALL_TESTS();
106+
}

web-crawler-multithreaded/web-crawler-multithreaded.vcxproj

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@
3636
<PlatformToolset>v143</PlatformToolset>
3737
</PropertyGroup>
3838
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
39-
<ConfigurationType>StaticLibrary</ConfigurationType>
39+
<ConfigurationType>Application</ConfigurationType>
4040
<UseDebugLibraries>true</UseDebugLibraries>
4141
<PlatformToolset>v143</PlatformToolset>
4242
</PropertyGroup>
@@ -106,6 +106,7 @@
106106
<ItemGroup>
107107
<ClCompile Include="HtmlParser.ixx" />
108108
<ClCompile Include="index.ixx" />
109+
<ClCompile Include="test.cpp" />
109110
</ItemGroup>
110111
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
111112
<ImportGroup Label="ExtensionTargets">

web-crawler-multithreaded/web-crawler-multithreaded.vcxproj.filters

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,5 +21,8 @@
2121
<ClCompile Include="index.ixx">
2222
<Filter>Source Files</Filter>
2323
</ClCompile>
24+
<ClCompile Include="test.cpp">
25+
<Filter>Source Files</Filter>
26+
</ClCompile>
2427
</ItemGroup>
2528
</Project>

web-crawler-multithreaded/xmake.lua

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,15 @@
1+
add_requires("gtest")
12
set_languages('c++20')
23
target("web-crawler-multithreaded")
34
set_kind("static")
45

5-
66
add_files("*.ixx", {install = true})
77
target_end()
8+
target("web-crawler-multithreaded-test")
9+
set_kind("binary")
10+
add_files("*.ixx")
11+
add_files("test.cpp")
12+
add_packages("gtest")
13+
set_group("test")
14+
set_default(false)
15+
target_end()

0 commit comments

Comments
 (0)