pagination

main
alex 2023-10-17 18:56:55 +02:00
parent b6343e55c6
commit 585afb821f
2 changed files with 33 additions and 28 deletions

View File

@@ -226,7 +226,7 @@ export default function Robots() {
"PATCH",
{
robotId: record.id,
name: selectedRobotName,
robotName: selectedRobotName,
},
{},
myFetchContentType.JSON,
@@ -477,6 +477,8 @@ export default function Robots() {
});
break;
case ReceivedSSECommands.AddUnauthorizedRobot:
console.log("a", unauthorizedRobotsPaginationPageRef.current);
if (unauthorizedRobotsPaginationPageRef.current === 1) {
setUnauthorizedRobots((arr) => {
const newArr = [...arr];
@@ -547,8 +549,9 @@ export default function Robots() {
// if user is on the last page and the last item is removed, we need to go back one page
if (
body.UnauthorizedRobotsTotalPages > 0 &&
unauthorizedRobotsPaginationPageRef.current >
body.UnauthorizedRobotsTotalPages
body.UnauthorizedRobotsTotalPages
) {
unauthorizedRobotsPaginationPageRef.current--;
setUnauthorizedRobotsPaginationPage(
@@ -574,7 +577,10 @@ export default function Robots() {
setUnauthorizedRobotsTotalPages(body.TotalPages);
// if user is on the last page and the last item is removed, we need to go back one page
if (unauthorizedRobotsPaginationPageRef.current > body.TotalPages) {
if (
body.TotalPages > 0 &&
unauthorizedRobotsPaginationPageRef.current > body.TotalPages
) {
unauthorizedRobotsPaginationPageRef.current--;
setUnauthorizedRobotsPaginationPage(
unauthorizedRobotsPaginationPageRef.current
@@ -597,7 +603,10 @@ export default function Robots() {
setRobotsTotalPages(body.TotalPages);
// if user is on the last page and the last item is removed, we need to go back one page
if (robotsPaginationPageRef.current > body.TotalPages) {
if (
body.TotalPages > 0 &&
robotsPaginationPageRef.current > body.TotalPages
) {
robotsPaginationPageRef.current--;
setRobotsPaginationPage(robotsPaginationPageRef.current);
}
@@ -609,7 +618,7 @@ export default function Robots() {
const index = arr.findIndex((x) => x.Id === body.RobotId);
if (index !== -1) {
newArr[index].Name = body.Name;
newArr[index].Name = body.RobotName;
}
return newArr;
@@ -715,29 +724,25 @@ export default function Robots() {
totalPages={robotsTotalPages}
/>
{unauthorizedRobots.length > 0 && (
<>
<Typography.Title level={4}>
{t("robotics.unauthorizedRobots.header")}
</Typography.Title>
<Typography.Title level={4}>
{t("robotics.unauthorizedRobots.header")}
</Typography.Title>
<Table
scroll={{ x: "max-content" }}
columns={getUnauthorizedTableContent()}
dataSource={getUnauthorizedTableItems(unauthorizedRobots)}
pagination={false}
/>
<Table
scroll={{ x: "max-content" }}
columns={getUnauthorizedTableContent()}
dataSource={getUnauthorizedTableItems(unauthorizedRobots)}
pagination={false}
/>
<MyPagination
paginationPage={unauthorizedRobotsPaginationPage}
setPaginationPage={(page) => {
setUnauthorizedRobotsPaginationPage(page);
unauthorizedRobotsPaginationPageRef.current = page;
}}
totalPages={unauthorizedRobotsTotalPages}
/>
</>
)}
<MyPagination
paginationPage={unauthorizedRobotsPaginationPage}
setPaginationPage={(page) => {
setUnauthorizedRobotsPaginationPage(page);
unauthorizedRobotsPaginationPageRef.current = page;
}}
totalPages={unauthorizedRobotsTotalPages}
/>
</>
);
}

View File

@@ -111,8 +111,8 @@ export const Constants = {
MAX_ROBOTICS_ROBOT_NAME_LENGTH: 30,
MIN_USER_API_KEY_NAME_LENGTH: 2,
MAX_USER_API_KEY_NAME_LENGTH: 30,
ROBOTICS_ROBOTS_PAGINATION_LIMIT: 10,
ROBOTICS_UNAUTHORIZED_PAGINATION_LIMIT: 10,
ROBOTICS_ROBOTS_PAGINATION_LIMIT: 5,
ROBOTICS_UNAUTHORIZED_PAGINATION_LIMIT: 5,
},
MAX_AVATAR_SIZE: 5 * 1024 * 1024,
ACCEPTED_AVATAR_FILE_TYPES: [