
Upscale-variation #5

Merged · 2 commits · May 5, 2023
README.md (23 additions, 4 deletions)
@@ -1,6 +1,7 @@
# midjourney-ui

Midjourney UI is an open source txt2img UI for AI drawing

<div align="center">
<p>
<a href="https://discord.gg/dP95gZ8z"><img src="https://img.shields.io/discord/1082500871478329374?color=5865F2&logo=discord&logoColor=white" alt="Discord server" /></a>
@@ -10,11 +11,13 @@ Midjourney UI is an open source txt2img UI for AI draw
[discord bot example](https://github.com/erictik/midjourney-discord-wrapper/)

See [README.dev.md](README.dev.md) for development instructions.
See a screenshot of the UI:
![screenshot](images/Screenshot.png)

## Deploy

#### Vercel

Host your own live version of Midjourney UI with Vercel.

[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgit.luolix.top%2Ferictik%2Fmidjourney-ui)
@@ -24,42 +27,58 @@ Host your own live version of Midjourney UI with Vercel.
```bash
docker run --env-file .env -p 3000:3000 erictik/midjourney-ui
```

or

```bash
docker run -e SALAI_TOKEN=xxxxxxxx -e SERVER_ID=xxxxxxxx -e CHANNEL_ID=xxxxxxxx -p 3000:3000 erictik/midjourney-ui
```

## Running locally

1. Clone the repo

```bash
git clone https://github.com/erictik/midjourney-ui.git
cd midjourney-ui
```

2. Install the dependencies

```bash
npm install
```

or

```bash
yarn
```

3. Set the environment variables ([how to get your Discord SALAI_TOKEN](https://www.androidauthority.com/get-discord-token-3149920/))

```bash
export SALAI_TOKEN=xxxxxxxx
export SERVER_ID=xxxxxxxx
export CHANNEL_ID=xxxxxxxx
```

4. Run the development server

```bash
npm run dev
```

or

```bash
yarn dev
```

5. Open [http://localhost:3000](http://localhost:3000) in your browser to see the result.

## Route map

- [x] upscale & variation
- [ ] ChatGPT prompt generation
- [ ] history of generated images
components/tag.tsx (new file, 44 additions)
@@ -0,0 +1,44 @@
import React, { useState } from "react";
import { Space, Tag } from "antd";

const { CheckableTag } = Tag;

interface Props {
  Data: string[];
  onClick?: (tag: string) => void;
}

const App = ({ Data, onClick }: Props) => {
  const [selectedTags, setSelectedTags] = useState<string[]>([]);

  // Tags can only be selected, never deselected: uncheck events are ignored,
  // so a tag stays highlighted after its action has been triggered.
  const handleChange = (tag: string, checked: boolean) => {
    if (!checked) return;
    const nextSelectedTags = [...selectedTags, tag];
    console.log("You are interested in: ", nextSelectedTags);
    onClick && onClick(tag);
    setSelectedTags(nextSelectedTags);
  };

  return (
    <>
      <Space className="ml-5" size={16} wrap>
        {Data.map((tag) => (
          <CheckableTag
            className={
              selectedTags.includes(tag) ? "bg-neutral-700" : "bg-neutral-200"
            }
            key={tag}
            checked={selectedTags.includes(tag)}
            onChange={(checked) => handleChange(tag, checked)}
          >
            {tag}
          </CheckableTag>
        ))}
      </Space>
    </>
  );
};

export default App;
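For context, a minimal usage sketch of this component; the `ActionTags` import name, the tag values, and the `MessageActions` wrapper are illustrative assumptions, not part of this PR.

```tsx
// Hypothetical wiring: render upscale/variation tags under a generated image
// and forward the clicked tag to a handler supplied by the parent.
import ActionTags from "../components/tag";

const MessageActions = ({ onAction }: { onAction: (tag: string) => void }) => (
  <ActionTags
    Data={["U1", "U2", "U3", "U4", "V1", "V2", "V3", "V4"]}
    onClick={(tag) => onAction(tag)}
  />
);

export default MessageActions;
```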
interfaces/message.ts (5 additions, 1 deletion)
@@ -1,4 +1,8 @@
export interface Message {
  text: string;
  img: string;
  msgID?: string;
  msgHash?: string;
  content?: string;
  hasTag: boolean;
}
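A sketch of what a populated message might look like once an imagine result comes back; the values below are placeholders, and only the field names come from this interface.

```ts
// Import path depends on where this file lives relative to interfaces/.
import { Message } from "../interfaces/message";

// Placeholder values; the real IDs and URL come from the Midjourney/Discord response.
const finished: Message = {
  text: "a lighthouse at dawn --v 5",
  img: "https://cdn.discordapp.com/attachments/xxxx/xxxx/result.png",
  content: "a lighthouse at dawn --v 5",
  msgID: "1100000000000000000",
  msgHash: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
  hasTag: true, // show the upscale/variation tags under this message
};
```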
pages/api/imagine.ts (1 addition)
@@ -9,6 +9,7 @@ const client = new Midjourney(
  <string>process.env.CHANNEL_ID,
  <string>process.env.SALAI_TOKEN
);
// Raise the client's wait limit so long-running generations are not cut off early.
client.maxWait = 600;
const handler = async (req: Request) => {
  const { prompt } = await req.json();
  console.log("imagine.handler", prompt);
pages/api/upscale.ts (34 additions, 25 deletions)
@@ -1,28 +1,37 @@
 // Next.js API route support: https://nextjs.org/docs/api-routes/introduction
-import { Midjourney } from 'midjourney'
-import type { NextApiRequest, NextApiResponse } from 'next'
-import { ResponseError } from '../../interfaces'
-import { Readable } from 'stream'
-const client = new Midjourney(<string>process.env.SERVER_ID, <string>process.env.CHANNEL_ID, <string>process.env.SALAI_TOKEN)
-export default async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse
-) {
-  const { content,index,msgId,msgHash } = req.body
-  const stream = new Readable({
-    read() {
-    }
-  })
-  client.Upscale(content,index,msgId,msgHash , (uri: string) => {
-    console.log("upsale.loading", uri)
-    stream.push(JSON.stringify({ uri }))
-  }).then((msg) => {
-    console.log("upsale.done", msg)
-    stream.push(JSON.stringify(msg))
-    stream.push(null)
-  }).catch((err: ResponseError) => {
-    console.log("upsale.error", err)
-    stream.push(null)
-  })
-  stream.pipe(res);
+import { Midjourney } from "midjourney";
+import { ResponseError } from "../../interfaces";
+export const config = {
+  runtime: "edge",
+};
+const client = new Midjourney(
+  <string>process.env.SERVER_ID,
+  <string>process.env.CHANNEL_ID,
+  <string>process.env.SALAI_TOKEN
+);
+client.maxWait = 600;
+export default async function handler(req: Request) {
+  const { content, index, msgId, msgHash } = await req.json();
+  console.log("upscale.handler", content);
+  const encoder = new TextEncoder();
+  const readable = new ReadableStream({
+    start(controller) {
+      console.log("upscale.start", content);
+      client
+        .Upscale(content, index, msgId, msgHash, (uri: string) => {
+          console.log("upscale.loading", uri);
+          controller.enqueue(encoder.encode(JSON.stringify({ uri })));
+        })
+        .then((msg) => {
+          console.log("upscale.done", msg);
+          controller.enqueue(encoder.encode(JSON.stringify(msg)));
+          controller.close();
+        })
+        .catch((err: ResponseError) => {
+          console.log("upscale.error", err);
+          controller.close();
+        });
+    },
+  });
+  return new Response(readable, {});
 }
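Since the handler now returns a streamed `Response`, a client can read the `{ uri }` progress updates as they arrive. A minimal fetch sketch follows; the endpoint path matches this route, but the parameter values and error handling are assumptions.

```ts
// Read the streamed upscale response chunk by chunk.
async function upscale(content: string, index: number, msgId: string, msgHash: string) {
  const res = await fetch("/api/upscale", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ content, index, msgId, msgHash }),
  });
  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // Each chunk carries JSON enqueued by the handler: { uri } progress
    // updates first, then the final upscaled message object.
    console.log("upscale chunk:", decoder.decode(value));
  }
}
```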
pages/api/variation.ts (34 additions, 27 deletions)
@@ -1,30 +1,37 @@
 // Next.js API route support: https://nextjs.org/docs/api-routes/introduction
-import { Midjourney } from 'midjourney'
-import type { NextApiRequest, NextApiResponse } from 'next'
-import { ResponseError } from '../../interfaces'
-import { Readable } from 'stream'
-const client = new Midjourney(<string>process.env.SERVER_ID, <string>process.env.CHANNEL_ID, <string>process.env.SALAI_TOKEN)
-export default async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse
-) {
-  const { content, index, msgId, msgHash } = req.body
-  const stream = new Readable({
-    read() {
-
-    }
-  })
-  console.log("variation.start", prompt)
-  client.Variation(content, index, msgId, msgHash, (uri: string) => {
-    console.log("variation.loading", uri)
-    stream.push(JSON.stringify({ uri }))
-  }).then((msg) => {
-    console.log("variation.done", msg)
-    stream.push(JSON.stringify(msg))
-    stream.push(null)
-  }).catch((err: ResponseError) => {
-    console.log("variation.error", err)
-    stream.push(null)
-  })
-  stream.pipe(res);
+import { Midjourney } from "midjourney";
+import { ResponseError } from "../../interfaces";
+export const config = {
+  runtime: "edge",
+};
+const client = new Midjourney(
+  <string>process.env.SERVER_ID,
+  <string>process.env.CHANNEL_ID,
+  <string>process.env.SALAI_TOKEN
+);
+client.maxWait = 600;
+export default async function handler(req: Request) {
+  const { content, index, msgId, msgHash } = await req.json();
+  console.log("variation.handler", content);
+  const encoder = new TextEncoder();
+  const readable = new ReadableStream({
+    start(controller) {
+      console.log("variation.start", content);
+      client
+        .Variation(content, index, msgId, msgHash, (uri: string) => {
+          console.log("variation.loading", uri);
+          controller.enqueue(encoder.encode(JSON.stringify({ uri })));
+        })
+        .then((msg) => {
+          console.log("variation.done", msg);
+          controller.enqueue(encoder.encode(JSON.stringify(msg)));
+          controller.close();
+        })
+        .catch((err: ResponseError) => {
+          console.log("variation.error", err);
+          controller.close();
+        });
+    },
+  });
+  return new Response(readable, {});
 }
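The variation route mirrors the upscale route and takes the same `content`, `index`, `msgId`, and `msgHash` fields. As a sketch, the request body could be assembled from the `Message` fields added above; the V-tag-to-index mapping is an assumption about how the UI wires the tags, not something this diff establishes.

```ts
import { Message } from "../interfaces/message";

// Sketch only: build a /api/variation request body from a finished message.
function variationRequest(msg: Message, tag: string) {
  return {
    content: msg.content,
    index: Number(tag.replace("V", "")), // e.g. "V2" -> 2
    msgId: msg.msgID,
    msgHash: msg.msgHash,
  };
}

// Usage: fetch("/api/variation", { method: "POST", body: JSON.stringify(variationRequest(msg, "V2")) })
```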